creating an instance of StringToTime with {@link #StringToTime(Object)}
\n *
\n *
\n * \n *
The static methods provide a UNIX-style timestamp, a {@link java.util.Date} instance, or a \n * {@link java.util.Calendar} instance. In the event the time expression provided is invalid, \n * these methods return Boolean.FALSE.
\n * \n *
Instances of StringToTime inherit from {@link java.util.Date}; so, when instantiated\n * with an expression that the algorithm recognizes, the resulting instance of StringToTime\n * can be passed to any method or caller requiring a {@link java.util.Date} object. Unlike the static methods,\n * attempting to create a StringToTime instance with an invalid expression of time \n * results in a {@link StringToTimeException}.
\n * \n *
Valid expressions of time
\n * \n *
All expressions are case-insensitive.
\n * \n *
\n *
now (equal to new Date())
\n *
today (equal to StringToTime(\"00:00:00.000\"))
\n *
midnight (equal to StringToTime(\"00:00:00.000 +24 hours\"))
\n *
morning or this morning (by default, equal to StringToTime(\"07:00:00.000\"))
\n *
noon (by default, equal to StringToTime(\"12:00:00.000\"))
\n *
afternoon or this afternoon (by default, equal to StringToTime(\"13:00:00.000\"))
\n *
evening or this evening (by default, equal to StringToTime(\"17:00:00.000\"))
\n *
tonight (by default, equal to StringToTime(\"20:00:00.000\"))
\n *
tomorrow (by default, equal to StringToTime(\"now +24 hours\"))
\n *
tomorrow morning (by default, equal to StringToTime(\"morning +24 hours\"))
\n *
noon tomorrow or tomorrow noon (by default, equal to StringToTime(\"noon +24 hours\"))
\n *
tomorrow afternoon (by default, equal to StringToTime(\"afternoon +24 hours\"))
\n *
yesterday (by default, equal to StringToTime(\"now -24 hours\"))
\n *
all the permutations of yesterday and morning, noon, afternoon, and evening
\n *
October 26, 1981 or Oct 26, 1981
\n *
October 26 or Oct 26
\n *
26 October 1981
\n *
26 Oct 1981
\n *
26 Oct 81
\n *
10/26/1981 or 10-26-1981
\n *
10/26/81 or 10-26-81
\n *
1981/10/26 or 1981-10-26
\n *
10/26 or 10-26
\n *
\n * \n * @author Aaron Collegeman acollegeman@clutch-inc.com\n * @since JRE 1.5.0\n * @see http://us3.php.net/manual/en/function.strtotime.php\n */\npublic class StringToTime extends Date {\n\n\tprivate static final long serialVersionUID = 7889493424407815134L;\n\n\tprivate static final Log log = LogFactory.getLog(StringToTime.class);\n\t\n\t// default SimpleDateFormat string is the standard MySQL date format\n\tprivate static final String defaultSimpleDateFormat = \"yyyy-MM-dd HH:mm:ss.SSS\";\n\t\n\t// An expression of time (hour)(:(minute))?((:(second))(.(millisecond))?)?( *(am?|pm?))?(RFC 822 time zone|general time zone)?\n\tprivate static final String timeExpr = \"(\\\\d{1,2})(:(\\\\d{1,2}))?(:(\\\\d{1,2})(\\\\.(\\\\d{1,3}))?)?( *(am?|pm?))?( *\\\\-\\\\d{4}|[a-z]{3}|[a-z ]+)?\";\n\t\n\t/** Patterns and formats recognized by the algorithm; first match wins, so insert most specific patterns first. */\n\tprivate static final PatternAndFormat[] known = {\n\t\t\n\t\t// TODO: ISO 8601 and derivatives\n\t\t\n\t\t// just the year\n\t\tnew PatternAndFormat(\n\t\t\tPattern.compile(\"\\\\d{4}\"),\n\t\t\tnew Format(FormatType.YEAR)\n\t\t),\n\t\t\n\t\t// decrement, e.g., -1 day\n\t\tnew PatternAndFormat(\n\t\t\tPattern.compile(\"\\\\-( *\\\\d{1,} +[^ ]+){1,}\", Pattern.CASE_INSENSITIVE),\n\t\t\tnew Format(FormatType.DECREMENT)\n\t\t),\n\t\t\n\t\t// increment, e.g., +1 day\n\t\tnew PatternAndFormat(\n\t\t\tPattern.compile(\"\\\\+?( *\\\\d{1,} +[^ ]+){1,}\", Pattern.CASE_INSENSITIVE),\n\t\t\tnew Format(FormatType.INCREMENT)\n\t\t),\n\t\t\n\t\t// e.g., October 26 and Oct 26\n\t\tnew PatternAndFormat(\n\t\t\tPattern.compile(\"([a-z]+) +(\\\\d{1,2})\", Pattern.CASE_INSENSITIVE),\n\t\t\tnew Format(FormatType.MONTH_AND_DATE)\n\t\t),\n\t\t\n\t\t// e.g., 26 October 1981, or 26 Oct 1981, or 26 Oct 81\n\t\tnew PatternAndFormat(\n\t\t\tPattern.compile(\"\\\\d{1,2} +[a-z]+ +(\\\\d{2}|\\\\d{4})\", Pattern.CASE_INSENSITIVE),\n\t\t\tnew Format(\"d MMM y\")\t\n\t\t),\n\t\t\n\t\t// now or 
today\n\t\tnew PatternAndFormat(\n\t\t\tPattern.compile(\"(midnight|now|today|(this +)?(morning|afternoon|evening)|tonight|noon( +tomorrow)?|tomorrow|tomorrow +(morning|afternoon|evening|night|noon)?|yesterday|yesterday +(morning|afternoon|evening|night)?)\", Pattern.CASE_INSENSITIVE),\n\t\t\tnew Format(FormatType.WORD)\n\t\t),\n\t\t\n\t\t// time, 24-hour and 12-hour\n\t\tnew PatternAndFormat(\n\t\t\tPattern.compile(timeExpr, Pattern.CASE_INSENSITIVE),\n\t\t\tnew Format(FormatType.TIME)\n\t\t),\n\t\t\n\t\t// e.g., October 26, 1981 or Oct 26, 1981\n\t\tnew PatternAndFormat(\n\t\t\tPattern.compile(\"[a-z]+ +\\\\d{1,2} *, *(\\\\d{2}|\\\\d{4})\", Pattern.CASE_INSENSITIVE),\n\t\t\tnew Format(\"MMM d, y\")\n\t\t),\n\t\t\n\t\t// e.g., 10/26/1981 or 10/26/81\n\t\tnew PatternAndFormat(\n\t\t\tPattern.compile(\"\\\\d{1,2}/\\\\d{1,2}/\\\\d{2,4}\"),\n\t\t\tnew Format(\"M/d/y\")\n\t\t),\n\t\t\n\t\t// e.g., 10-26-1981 or 10-26-81\n\t\tnew PatternAndFormat(\n\t\t\tPattern.compile(\"\\\\d{1,2}\\\\-\\\\d{1,2}\\\\-\\\\d{2,4}\"),\n\t\t\tnew Format(\"M-d-y\")\n\t\t),\n\t\t\n\t\t// e.g., 10/26 or 10-26\n\t\tnew PatternAndFormat(\n\t\t\tPattern.compile(\"(\\\\d{1,2})(/|\\\\-)(\\\\d{1,2})\"),\n\t\t\tnew Format(FormatType.MONTH_AND_DATE_WITH_SLASHES)\n\t\t),\n\t\t\n\t\t// e.g., 1981/10/26\n\t\tnew PatternAndFormat(\n\t\t\tPattern.compile(\"\\\\d{4}/\\\\d{1,2}/\\\\d{1,2}\"),\n\t\t\tnew Format(\"y/M/d\")\n\t\t),\n\t\t\n\t\t// e.g., 1981-10-26\n\t\tnew PatternAndFormat(\n\t\t\tPattern.compile(\"\\\\d{4}\\\\-\\\\d{1,2}\\\\-\\\\d{1,2}\"),\n\t\t\tnew Format(\"y-M-d\")\n\t\t),\n\t\t\n\t\t// e.g., October or Oct\n\t\tnew PatternAndFormat(\n\t\t\tPattern.compile(\"(Jan(uary)?|Feb(ruary)?|Mar(ch)?|Apr(il)?|May|Jun(e)?|Jul(y)?|Aug(ust)?|Sep(tember)?|Oct(ober)?|Nov(ember)?|Dec(ember)?)\", Pattern.CASE_INSENSITIVE),\n\t\t\tnew Format(FormatType.MONTH)\n\t\t),\n\t\t\n\t\t// e.g., Tuesday or Tue\n\t\tnew 
PatternAndFormat(\n\t\t\tPattern.compile(\"(Sun(day)?|Mon(day)?|Tue(sday)?|Wed(nesday)?|Thu(rsday)?|Fri(day)?|Sat(urday)?)\", Pattern.CASE_INSENSITIVE),\n\t\t\tnew Format(FormatType.DAY_OF_WEEK)\n\t\t),\n\t\t\t\t\n\t\t// next, e.g., next Tuesday\n\t\tnew PatternAndFormat(\n\t\t\tPattern.compile(\"next +(.*)\", Pattern.CASE_INSENSITIVE),\n\t\t\tnew Format(FormatType.NEXT)\n\t\t),\n\t\t\n\t\t// last, e.g., last Tuesday\n\t\tnew PatternAndFormat(\n\t\t\tPattern.compile(\"last +(.*)\", Pattern.CASE_INSENSITIVE),\n\t\t\tnew Format(FormatType.LAST)\n\t\t),\n\t\t\n\t\t// compound statement\n\t\tnew PatternAndFormat(\n\t\t\tPattern.compile(\"(.*) +(((\\\\+|\\\\-){1}.*)|\"+timeExpr+\")$\", Pattern.CASE_INSENSITIVE),\n\t\t\tnew Format(FormatType.COMPOUND)\n\t\t)\n\t\t\n\t\t\n\t};\n\t\n\t/** Date/Time string parsed */\n\tprivate Object dateTimeString;\n\t\n\t/** The format to use in {@link #toString()) */\n\tprivate String simpleDateFormat;\n\t\n\t/** The {@link java.util.Date} interpreted from {@link #dateTimeString}, or {@link java.lang.Boolean} false */\n\tprivate Object date;\n\t\n\t\n\tpublic StringToTime() {\n\t\tsuper();\n\t\tthis.date = new Date(this.getTime());\n\t}\n\t\n\tpublic StringToTime(Date date) {\n\t\tsuper(date.getTime());\n\t\tthis.date = new Date(this.getTime());\n\t}\n\t\n\tpublic StringToTime(Object dateTimeString) {\n\t\tthis(dateTimeString, new Date(), defaultSimpleDateFormat);\n\t}\n\t\n\tpublic StringToTime(Object dateTimeString, String simpleDateFormat) {\n\t\tthis(dateTimeString, new Date(), simpleDateFormat);\n\t}\n\t\n\tpublic StringToTime(Object dateTimeString, Date now) {\n\t\tthis(dateTimeString, now, defaultSimpleDateFormat);\n\t}\n\t\n\tpublic StringToTime(Object dateTimeString, Long now) {\n\t\tthis(dateTimeString, new Date(now), defaultSimpleDateFormat);\n\t}\n\t\n\tpublic StringToTime(Object dateTimeString, Integer now) {\n\t\tthis(dateTimeString, new Date(new Long(now)), defaultSimpleDateFormat);\n\t}\n\t\n\tpublic StringToTime(Object 
dateTimeString, Date now, String simpleDateFormat) {\n\t\tsuper(0);\n\t\tassert dateTimeString != null;\n assert now != null;\n assert simpleDateFormat != null;\n\t\t\n\t\tthis.dateTimeString = dateTimeString;\n\t\tthis.simpleDateFormat = simpleDateFormat;\n\t\t\n\t\tdate = StringToTime.date(dateTimeString, now);\n\t\tif (!Boolean.FALSE.equals(date))\n\t\t\tsetTime(((Date) date).getTime());\n\t\telse\n\t\t\tthrow new StringToTimeException(dateTimeString);\n\t}\n\t\n\t/**\n\t * @return {@link java.util.Date#getTime()}\n\t */\n\tpublic long getTime() {\n\t\treturn super.getTime();\n\t}\n\t\n\t/**\n\t * @return Calendar set to timestamp {@link java.util.Date#getTime()}\n\t */\n\tpublic Calendar getCal() {\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.setTimeInMillis(super.getTime());\n\t\treturn cal;\n\t}\n\t\n\t/**\n\t * @param simpleDateFormat\n\t * @see {@link SimpleDateFormat}\n\t * @return Date formatted according to simpleDateFormat\n\t */\n\tpublic String format(String simpleDateFormat) {\n\t\treturn new SimpleDateFormat(simpleDateFormat).format(this);\n\t}\n\t\n\t/**\n\t * @return If {@link #simpleDateFormat} provided in constructor, then attempts to format date\n\t * accordingly; otherwise, returns the String value of {@link java.util.Date#getTime()}.\n\t */\n\tpublic String toString() {\n\t\tif (simpleDateFormat != null)\n\t\t\treturn new SimpleDateFormat(simpleDateFormat).format(this);\n\t\telse\n\t\t\treturn new SimpleDateFormat(\"yyyy/dd/MM\").format(this); //String.valueOf(super.getTime());\n\t}\n\t\n\t\n\t/**\n\t * A single parameter version of {@link #time(String, Date)}, passing a new instance of {@link java.util.Date} as the\n\t * second parameter.\n\t * @param dateTimeString\n\t * @return A {@link java.lang.Long} timestamp representative of dateTimeString, or {@link java.lang.Boolean} false.\n\t * @see #time(String, Date)\n\t */\n\tpublic static Object time(Object dateTimeString) {\n\t\treturn time(dateTimeString, new 
Date());\n\t}\n\t\n\t/**\n\t * Parse dateTimeString and produce a timestamp. \n\t * @param dateTimeString\n\t * @param now \n\t * @return
\n\t *
If equal to "now", return the number of milliseconds since January 1, 1970 or the value of now.
\n\t *
If an incremental or decremental statement, e.g., +1 hour or -1 week, or a composite thereof, e.g., +1 hour 1 minute 1 second,\n\t * returns a date equal to the increment/decrement plus the value of now.\n\t *
\n\t */\n\tpublic static Object time(Object dateTimeString, Date now) {\n\t\ttry {\n\t\t\tif (dateTimeString == null)\n\t\t\t\treturn Boolean.FALSE;\n\t\t\telse {\n\t\t\t\tString trimmed = String.valueOf(dateTimeString).trim();\n\t\t\t\tfor(PatternAndFormat paf : known) {\n\t\t\t\t\tMatcher m = paf.matches(trimmed);\n\t\t\t\t\tif (m.matches()) {\n\t\t\t\t\t\tLong time = paf.parse(trimmed, now, m);\n\t\t\t\t\t\t//System.out.println(String.format(\"[%s] triggered format [%s]: %s\", dateTimeString, paf.f, new Date(time)));\n\t\t\t\t\t\tif (log.isDebugEnabled())\n\t\t\t\t\t\t\tlog.debug(String.format(\"[%s] triggered format [%s]: %s\", dateTimeString, paf.f, new Date(time)));\n\t\t\t\t\t\treturn time;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t\n\t\t\t\t// no match\n\t\t\t\tif (log.isDebugEnabled())\n\t\t\t\t\tlog.debug(String.format(\"Unrecognized date/time string [%s]\", dateTimeString));\n\t\t\t\treturn Boolean.FALSE;\n\t\t\t}\n\t\t} catch (Exception e) { // thrown by various features of the parser\n\t\t\tif (!Boolean.parseBoolean(System.getProperty(StringToTime.class+\".EXCEPTION_ON_PARSE_FAILURE\", \"false\"))) {\n\t\t\t\tif (log.isDebugEnabled())\n\t\t\t\t\tlog.debug(String.format(\"Failed to parse [%s] into a java.util.Date instance\", dateTimeString));\n\t\t\t\treturn Boolean.FALSE;\n\t\t\t}\n\t\t\telse\n\t\t\t\tthrow new StringToTimeException(dateTimeString, e);\n\t\t}\n\t}\n\t\n\tprivate static ParserResult getParserResult(String trimmedDateTimeString, Date now) throws ParseException {\n\t\tfor(PatternAndFormat paf : known) {\n\t\t\tMatcher m = paf.matches(trimmedDateTimeString);\n\t\t\tif (m.matches()) {\n\t\t\t\tlog.debug(String.format(\"Date/time string [%s] triggered format [%s]\", trimmedDateTimeString, paf.f));\n\t\t\t\treturn new ParserResult(paf.parse(trimmedDateTimeString, now, m), paf.f.type);\n\t\t\t}\n\t\t}\n\t\t\n\t\treturn null;\n\t}\n\t\n\tpublic static Object date(Object dateTimeString) {\n\t\treturn date(dateTimeString, new Date());\n\t}\n\t\n\tpublic 
static Object date(Object dateTimeString, Date now) {\n\t\tObject time = time(dateTimeString, now);\n\t\treturn (Boolean.FALSE.equals(time)) ? Boolean.FALSE : new Date((Long) time);\n\t}\n\t\n\tpublic static Object cal(Object dateTimeString) {\n\t\treturn cal(dateTimeString, new Date());\n\t}\n\t\n\tpublic static Object cal(Object dateTimeString, Date now) {\n\t\tObject date = date(dateTimeString, now);\n\t\tif (Boolean.FALSE.equals(date))\n\t\t\treturn Boolean.FALSE;\n\t\telse {\n\t\t\tCalendar cal = Calendar.getInstance();\n\t\t\tcal.setTime((Date) date);\n\t\t\treturn cal;\n\t\t}\n\t}\n\t\n\tprivate static class PatternAndFormat {\n\t\tpublic Pattern p;\n\t\tpublic Format f;\n\t\t\n\t\tpublic PatternAndFormat(Pattern p, Format f) {\n\t\t\tthis.p = p;\n\t\t\tthis.f = f;\n\t\t}\n\t\t\n\t\tpublic Matcher matches(String dateTimeString) {\n\t\t\treturn p.matcher(dateTimeString);\n\t\t}\n\t\t\n\t\tpublic Long parse(String dateTimeString, Date now, Matcher m) throws ParseException {\n\t\t\treturn f.parse(dateTimeString, now, m).getTime();\n\t\t}\n\t}\n\t\n\tprivate static class ParserResult {\n\t\tpublic FormatType type;\n\t\tpublic Long timestamp;\n\t\t\n\t\tpublic ParserResult(Long timestamp, FormatType type) {\n\t\t\tthis.timestamp = timestamp;\n\t\t\tthis.type = type;\n\t\t}\n\t}\n\t\n\tprivate static class Format {\n\t\t\n\t\tprivate static Pattern unit = Pattern.compile(\"(\\\\d{1,}) +(s(ec(ond)?)?|mo(n(th)?)?|(hour|hr?)|d(ay)?|(w(eek)?|wk)|m(in(ute)?)?|(y(ear)?|yr))s?\");\n\t\t\n\t\tprivate static Pattern removeExtraSpaces = Pattern.compile(\" +\");\n\t\t\n\t\tprivate static Map translateDayOfWeek = new HashMap();\n\t\t\n\t\tstatic {\n\t\t\ttranslateDayOfWeek.put(\"sunday\", 1);\n\t\t\ttranslateDayOfWeek.put(\"sun\", 1);\n\t\t\ttranslateDayOfWeek.put(\"monday\", 2);\n\t\t\ttranslateDayOfWeek.put(\"mon\", 2);\n\t\t\ttranslateDayOfWeek.put(\"tuesday\", 3);\n\t\t\ttranslateDayOfWeek.put(\"tue\", 3);\n\t\t\ttranslateDayOfWeek.put(\"wednesday\", 
4);\n\t\t\ttranslateDayOfWeek.put(\"wed\", 4);\n\t\t\ttranslateDayOfWeek.put(\"thursday\", 5);\n\t\t\ttranslateDayOfWeek.put(\"thu\", 5);\n\t\t\ttranslateDayOfWeek.put(\"friday\", 6);\n\t\t\ttranslateDayOfWeek.put(\"fri\", 6);\n\t\t\ttranslateDayOfWeek.put(\"saturday\", 7);\n\t\t\ttranslateDayOfWeek.put(\"sat\", 7);\n\t\t}\n\t\t\n\t\tprivate String sdf;\n\t\t\n\t\tprivate FormatType type;\n\t\t\n\t\tpublic Format(FormatType type) {\n\t\t\tthis.type = type;\n\t\t}\n\t\t\n\t\tpublic Format(String sdf) {\n\t\t\tthis.sdf = sdf;\n\t\t}\n\t\t\n\t\tpublic String toString() {\n\t\t\tif (sdf != null)\n\t\t\t\treturn sdf;\n\t\t\telse\n\t\t\t\treturn type.toString();\n\t\t}\n\t\t \n\t\tpublic Date parse(String dateTimeString, Date now, Matcher m) throws ParseException {\n\t\t\tif (sdf != null)\n\t\t\t\treturn new SimpleDateFormat(sdf).parse(dateTimeString);\n\t\t\telse {\n\t\t\t\tdateTimeString = removeExtraSpaces.matcher(dateTimeString).replaceAll(\" \").toLowerCase();\n\t\t\t\t\n\t\t\t\ttry {\n\t\t\t\t\tCalendar cal = Calendar.getInstance();\n\t\t\t\t\tcal.setTime(now);\n\t\t\t\t\t\n\t\t\t\t\t// word expressions, e.g., \"now\" and \"today\" and \"tonight\"\n\t\t\t\t\tif (type == FormatType.WORD) {\n\t\t\t\t\t\tif (\"now\".equals(dateTimeString))\n\t\t\t\t\t\t\treturn (now != null ? 
now : new Date());\n\t\t\t\t\t\t\n\t\t\t\t\t\telse if (\"today\".equals(dateTimeString)) {\n\t\t\t\t\t\t\tcal.set(Calendar.HOUR_OF_DAY, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.MINUTE, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.SECOND, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.MILLISECOND, 0);\n\t\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\telse if (\"morning\".equals(dateTimeString) || \"this morning\".equals(dateTimeString)) {\n\t\t\t\t\t\t\t// by default, this morning begins at 07:00:00.000\n\t\t\t\t\t\t\tint thisMorningBeginsAt = Integer.parseInt(System.getProperty(StringToTime.class+\".THIS_MORNING_BEGINS_AT\", \"7\"));\n\t\t\t\t\t\t\tcal.set(Calendar.HOUR_OF_DAY, thisMorningBeginsAt);\n\t\t\t\t\t\t\tcal.set(Calendar.MINUTE, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.SECOND, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.MILLISECOND, 0);\n\t\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\telse if (\"noon\".equals(dateTimeString)) {\n\t\t\t\t\t\t\t// noon is 12:00:00.000\n\t\t\t\t\t\t\tcal.set(Calendar.HOUR_OF_DAY, 12);\n\t\t\t\t\t\t\tcal.set(Calendar.MINUTE, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.SECOND, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.MILLISECOND, 0);\n\t\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\telse if (\"afternoon\".equals(dateTimeString) || \"this afternoon\".equals(dateTimeString)) {\n\t\t\t\t\t\t\t// by default, this afternoon begins at 13:00:00.000\n\t\t\t\t\t\t\tint thisAfternoonBeginsAt = Integer.parseInt(System.getProperty(StringToTime.class+\".THIS_AFTERNOON_BEGINS_AT\", \"13\"));\n\t\t\t\t\t\t\tcal.set(Calendar.HOUR_OF_DAY, thisAfternoonBeginsAt);\n\t\t\t\t\t\t\tcal.set(Calendar.MINUTE, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.SECOND, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.MILLISECOND, 0);\n\t\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\telse if (\"evening\".equals(dateTimeString) || \"this 
evening\".equals(dateTimeString)) {\n\t\t\t\t\t\t\t// by default, this evening begins at 17:00:00.000\n\t\t\t\t\t\t\tint thisEveningBeginsAt = Integer.parseInt(System.getProperty(StringToTime.class+\".THIS_EVENING_BEGINS_AT\", \"17\"));\n\t\t\t\t\t\t\tcal.set(Calendar.HOUR_OF_DAY, thisEveningBeginsAt);\n\t\t\t\t\t\t\tcal.set(Calendar.MINUTE, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.SECOND, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.MILLISECOND, 0);\n\t\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\telse if (\"tonight\".equals(dateTimeString)) {\n\t\t\t\t\t\t\t// by default, tonight begins at 20:00:00.000\n\t\t\t\t\t\t\tint tonightBeginsAt = Integer.parseInt(System.getProperty(StringToTime.class+\".TONIGHT_BEGINS_AT\", \"20\"));\n\t\t\t\t\t\t\tcal.set(Calendar.HOUR_OF_DAY, tonightBeginsAt);\n\t\t\t\t\t\t\tcal.set(Calendar.MINUTE, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.SECOND, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.MILLISECOND, 0);\n\t\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\telse if (\"midnight\".equals(dateTimeString)) {\n\t\t\t\t\t\t\treturn new StringToTime(\"00:00:00 +24 hours\", now);\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\telse if (\"tomorrow\".equals(dateTimeString)) {\n\t\t\t\t\t\t\treturn new StringToTime(\"now +24 hours\", now);\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\telse if (\"tomorrow morning\".equals(dateTimeString)) {\n\t\t\t\t\t\t\treturn new StringToTime(\"morning +24 hours\", now);\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\telse if (\"tomorrow noon\".equals(dateTimeString) || \"noon tomorrow\".equals(dateTimeString)) {\n\t\t\t\t\t\t\treturn new StringToTime(\"noon +24 hours\", now);\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\telse if (\"tomorrow afternoon\".equals(dateTimeString)) {\n\t\t\t\t\t\t\treturn new StringToTime(\"afternoon +24 hours\", now);\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\telse if (\"tomorrow evening\".equals(dateTimeString)) {\n\t\t\t\t\t\t\treturn 
new StringToTime(\"evening +24 hours\", now);\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\telse if (\"tomorrow night\".equals(dateTimeString)) {\n\t\t\t\t\t\t\treturn new StringToTime(\"tonight +24 hours\", now);\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\telse if (\"yesterday\".equals(dateTimeString)) {\n\t\t\t\t\t\t\treturn new StringToTime(\"now -24 hours\", now);\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\telse if (\"yesterday morning\".equals(dateTimeString)) {\n\t\t\t\t\t\t\treturn new StringToTime(\"morning -24 hours\", now);\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\telse if (\"yesterday noon\".equals(dateTimeString) || \"noon yesterday\".equals(dateTimeString)) {\n\t\t\t\t\t\t\treturn new StringToTime(\"noon -24 hours\", now);\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\telse if (\"yesterday afternoon\".equals(dateTimeString)) {\n\t\t\t\t\t\t\treturn new StringToTime(\"afternoon -24 hours\", now);\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\telse if (\"yesterday evening\".equals(dateTimeString)) {\n\t\t\t\t\t\t\treturn new StringToTime(\"evening -24 hours\", now);\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\telse if (\"yesterday night\".equals(dateTimeString)) {\n\t\t\t\t\t\t\treturn new StringToTime(\"tonight -24 hours\", now);\n\t\t\t\t\t\t}\n \t\t\t\t\t\t\n \t\t\t\t\t\t\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\tthrow new ParseException(String.format(\"Unrecognized date word: %s\", dateTimeString), 0);\n\t\t\t\t\t}\n\t\t\t\t\t\n\t\t\t\t\t// time expressions, 24-hour and 12-hour\n\t\t\t\t\telse if (type == FormatType.TIME) {\n\t\t\t\t\t\t// An expression of time (hour)(:(minute))?((:(second))(.(millisecond))?)?( *(am?|pm?))?(RFC 822 time zone|general time zone)?\n\t\t\t\t\t\tString hour = m.group(1);\n\t\t\t\t\t\tString min = m.group(3);\n\t\t\t\t\t\tString sec = m.group(5);\n\t\t\t\t\t\tString ms = m.group(7);\n\t\t\t\t\t\tString amOrPm = m.group(8);\n\t\t\t\t\t\t\n\t\t\t\t\t\tif (hour != null) {\n\t\t\t\t\t\t\tif (amOrPm != null)\n\t\t\t\t\t\t\t\tcal.set(Calendar.HOUR, new 
Integer(hour));\n\t\t\t\t\t\t\telse\n\t\t\t\t\t\t\t\tcal.set(Calendar.HOUR_OF_DAY, new Integer(hour));\n\t\t\t\t\t\t}\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\tcal.set(Calendar.HOUR, 0);\n\t\t\t\t\t\t\n\t\t\t\t\t\tcal.set(Calendar.MINUTE, (min != null ? new Integer(min) : 0));\n\t\t\t\t\t\tcal.set(Calendar.SECOND, (sec != null ? new Integer(sec) : 0));\n\t\t\t\t\t\tcal.set(Calendar.MILLISECOND, (ms != null ? new Integer(ms) : 0));\n\t\t\t\t\t\t\n\t\t\t\t\t\tif (amOrPm != null)\n\t\t\t\t\t\t\tcal.set(Calendar.AM_PM, (amOrPm.equals(\"a\") || amOrPm.equals(\"am\") ? Calendar.AM : Calendar.PM));\n\t\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t}\n\t\t\t\t\t\n\t\t\t\t\t// increments\n\t\t\t\t\telse if (type == FormatType.INCREMENT || type == FormatType.DECREMENT) {\n\t\t\t\t\t\tMatcher units = unit.matcher(dateTimeString);\n\t\t\t\t\t\twhile (units.find()) {\n\t\t\t\t\t\t\tInteger val = new Integer(units.group(1)) * (type == FormatType.DECREMENT ? -1 : 1);\n\t\t\t\t\t\t\tString u = units.group(2);\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t// second\n\t\t\t\t\t\t\tif (\"s\".equals(u) || \"sec\".equals(u) || \"second\".equals(u))\n\t\t\t\t\t\t\t\tcal.set(Calendar.SECOND, cal.get(Calendar.SECOND)+val);\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t// minute\n\t\t\t\t\t\t\telse if (\"m\".equals(u) || \"min\".equals(u) || \"minute\".equals(u))\n\t\t\t\t\t\t\t\tcal.set(Calendar.MINUTE, cal.get(Calendar.MINUTE)+val);\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t// hour\n\t\t\t\t\t\t\telse if (\"h\".equals(u) || \"hr\".equals(u) || \"hour\".equals(u))\n\t\t\t\t\t\t\t\tcal.set(Calendar.HOUR_OF_DAY, cal.get(Calendar.HOUR_OF_DAY)+val);\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t// day\n\t\t\t\t\t\t\telse if (\"d\".equals(u) || \"day\".equals(u))\n\t\t\t\t\t\t\t\tcal.set(Calendar.DATE, cal.get(Calendar.DATE)+val);\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t// week\n\t\t\t\t\t\t\telse if (\"w\".equals(u) || \"wk\".equals(u) || \"week\".equals(u))\n\t\t\t\t\t\t\t\tcal.set(Calendar.WEEK_OF_YEAR, 
cal.get(Calendar.WEEK_OF_YEAR)+val);\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t// month\n\t\t\t\t\t\t\telse if (\"mo\".equals(u) || \"mon\".equals(u) || \"month\".equals(u))\n\t\t\t\t\t\t\t\tcal.set(Calendar.MONTH, cal.get(Calendar.MONTH)+val);\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t// year\n\t\t\t\t\t\t\telse if (\"y\".equals(u) || \"yr\".equals(u) || \"year\".equals(u))\n\t\t\t\t\t\t\t\tcal.set(Calendar.YEAR, cal.get(Calendar.YEAR)+val);\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\telse\n\t\t\t\t\t\t\t\tthrow new IllegalArgumentException(String.format(\"Unrecognized %s unit: [%s]\", type, u));\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t}\n\t\t\t\t\t\n\t\t\t\t\t// compound expressions\n\t\t\t\t\telse if (type == FormatType.COMPOUND) {\n\t\t\t\t\t\tObject date = StringToTime.date(m.group(1), now);\n\t\t\t\t\t\tif (!Boolean.FALSE.equals(date))\n\t\t\t\t\t\t\treturn (Date) StringToTime.date(m.group(2), (Date) date);\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\tthrow new IllegalArgumentException(String.format(\"Couldn't parse %s, so couldn't compound with %s\", m.group(1), m.group(2)));\n\t\t\t\t\t}\n\t\t\t\t\t\n\t\t\t\t\t// month of the year\n\t\t\t\t\telse if (type == FormatType.MONTH) {\n\t\t\t\t\t\tCalendar ref = Calendar.getInstance();\n\t\t\t\t\t\tref.setTime(new SimpleDateFormat(\"MMM d, y\").parse(String.format(\"%s 1, 1970\", m.group(1))));\n\t\t\t\t\t\tcal.set(Calendar.MONTH, ref.get(Calendar.MONTH));\n\t\t\t\t\t\t\n\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t}\n\t\t\t\t\t\n\t\t\t\t\t// day of week\n\t\t\t\t\telse if (type == FormatType.DAY_OF_WEEK) {\n\t\t\t\t\t\tInteger ref = translateDayOfWeek.get(dateTimeString);\n\t\t\t\t\t\tif (cal.get(Calendar.DAY_OF_WEEK) >= ref)\n\t\t\t\t\t\t\tcal.set(Calendar.WEEK_OF_YEAR, cal.get(Calendar.WEEK_OF_YEAR)+1);\n\t\t\t\t\t\tcal.set(Calendar.DAY_OF_WEEK, ref);\n\t\t\t\t\t\t\n\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t}\n\t\t\t\t\t\n\t\t\t\t\t// month and day with 
slashes\n\t\t\t\t\telse if (type == FormatType.MONTH_AND_DATE_WITH_SLASHES) {\n\t\t\t\t\t\tCalendar ref = Calendar.getInstance();\n\t\t\t\t\t\tref.setTime(new SimpleDateFormat(\"M/d/y\").parse(String.format(\"%s/%s/1970\", m.group(1), m.group(3))));\n\t\t\t\t\t\tcal.set(Calendar.MONTH, ref.get(Calendar.MONTH));\n\t\t\t\t\t\tcal.set(Calendar.DATE, ref.get(Calendar.DATE));\n\t\t\t\t\t\t\n\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t}\n\t\t\t\t\t\n\t\t\t\t\t// month and day long-hand\n\t\t\t\t\telse if (type == FormatType.MONTH_AND_DATE) {\n\t\t\t\t\t\tCalendar ref = Calendar.getInstance();\n\t\t\t\t\t\tref.setTime(new SimpleDateFormat(\"MMM d, y\").parse(String.format(\"%s %s, 1970\", m.group(1), m.group(2))));\n\t\t\t\t\t\tcal.set(Calendar.MONTH, ref.get(Calendar.MONTH));\n\t\t\t\t\t\tcal.set(Calendar.DATE, ref.get(Calendar.DATE));\n\t\t\t\t\t\t\n\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t}\n\t\t\t\t\t\n\t\t\t\t\t// next X\n\t\t\t\t\telse if (type == FormatType.NEXT) {\n\t\t\t\t\t\t// Format types MONTH and DAY_OF_WEEK both return future dates, so no additional processing is needed\n\t\t\t\t\t\tString expr = m.group(1);\n\t\t\t\t\t\tParserResult parsed = StringToTime.getParserResult(expr, now);\n\t\t\t\t\t\t\n\t\t\t\t\t\tif (parsed != null && ( FormatType.MONTH.equals(parsed.type) || FormatType.DAY_OF_WEEK.equals(parsed.type) || FormatType.MONTH_AND_DATE.equals(parsed.type)) ) \n\t\t\t\t\t\t\treturn new Date(parsed.timestamp);\n\t\t\t\t\t\telse {\n\t\t\t\t\t\t\tif (\"week\".equals(expr)) \n\t\t\t\t\t\t\t\tcal.set(Calendar.WEEK_OF_YEAR, cal.get(Calendar.WEEK_OF_YEAR)+1);\n\t\t\t\t\t\t\telse if (\"month\".equals(expr)) \n\t\t\t\t\t\t\t\tcal.set(Calendar.MONTH, cal.get(Calendar.MONTH)+1);\n\t\t\t\t\t\t\telse if (\"year\".equals(expr))\n\t\t\t\t\t\t\t\tcal.set(Calendar.YEAR, cal.get(Calendar.YEAR)+1);\n\t\t\t\t\t\t\telse\n\t\t\t\t\t\t\t\tthrow new IllegalArgumentException(String.format(\"Invalid expression of time: %s\", 
dateTimeString));\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\t\n\t\t\t\t\t// last X\n\t\t\t\t\telse if (type == FormatType.LAST) {\n\t\t\t\t\t\tString expr = m.group(1);\n\t\t\t\t\t\tParserResult parsed = StringToTime.getParserResult(expr, now);\n\t\t\t\t\t\t\n\t\t\t\t\t\tif (parsed != null && (FormatType.MONTH.equals(parsed.type) || FormatType.MONTH_AND_DATE.equals(parsed.type))) {\n\t\t\t\t\t\t\treturn new StringToTime(\"-1 year\", new Date(parsed.timestamp));\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\n\t\t\t\t\t\telse if (parsed != null && FormatType.DAY_OF_WEEK.equals(parsed.type)) {\n\t\t\t\t\t\t\treturn new StringToTime(\"-1 week\", new Date(parsed.timestamp));\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\telse {\n\t\t\t\t\t\t\tif (\"week\".equals(expr)) \n\t\t\t\t\t\t\t\tcal.set(Calendar.WEEK_OF_YEAR, cal.get(Calendar.WEEK_OF_YEAR)-1);\n\t\t\t\t\t\t\telse if (\"month\".equals(expr)) \n\t\t\t\t\t\t\t\tcal.set(Calendar.MONTH, cal.get(Calendar.MONTH)-1);\n\t\t\t\t\t\t\telse if (\"year\".equals(expr))\n\t\t\t\t\t\t\t\tcal.set(Calendar.YEAR, cal.get(Calendar.YEAR)-1);\n\t\t\t\t\t\t\telse\n\t\t\t\t\t\t\t\tthrow new IllegalArgumentException(String.format(\"Invalid expression of time: %s\", dateTimeString));\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\t\n\t\t\t\t\t// year\n\t\t\t\t\telse if (type == FormatType.YEAR) {\n\t\t\t\t\t\tcal.set(Calendar.YEAR, new Integer(m.group(0)));\n\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t}\n\t\t\t\t\t\n\t\t\t\t\t// unimplemented format type\n\t\t\t\t\telse\n\t\t\t\t\t\tthrow new IllegalStateException(String.format(\"Unimplemented FormatType: %s\", type));\n\t\t\t\t} catch (ParseException e) {\n\t\t\t\t\tthrow e;\n\t\t\t\t} catch (IllegalStateException e) {\n\t\t\t\t\tthrow e;\n\t\t\t\t} catch (IllegalArgumentException e) {\n\t\t\t\t\tthrow e;\n\t\t\t\t} catch (Exception e) {\n\t\t\t\t\tthrow new 
RuntimeException(String.format(\"Unknown failure in string-to-time conversion: %s\", e.getMessage()), e);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\t\n\tprivate enum FormatType {\n\t\tCOMPOUND,\n\t\tMONTH_AND_DATE_WITH_SLASHES,\n\t\tMONTH_AND_DATE,\n\t\tMONTH,\n\t\tDAY_OF_WEEK,\n\t\tNEXT,\n\t\tLAST,\n\t\tINCREMENT,\n\t\tDECREMENT,\n\t\tWORD,\n\t\tTIME,\n\t\tYEAR\n\t}\n\t\n}\n", "src/main/java/com/clutch/dates/StringToTimeException.java": "package com.clutch.dates;\n\npublic class StringToTimeException extends RuntimeException {\n\t\n\tprivate static final long serialVersionUID = -3777846121104246071L;\n\n\tpublic StringToTimeException(Object dateTimeString) {\n\t\tsuper(String.format(\"Failed to parse [%s] into a java.util.Date\", dateTimeString));\n\t}\n\t\n\tpublic StringToTimeException(Object dateTimeString, Throwable cause) {\n\t\tsuper(String.format(\"Failed to parse [%s] into a java.util.Date\", dateTimeString), cause);\n\t}\n\n}\n", "src/test/java/com/clutch/dates/StringToTimeTest.java": "package com.clutch.dates;\n\nimport java.text.SimpleDateFormat;\nimport java.util.Calendar;\nimport java.util.Date;\nimport java.util.GregorianCalendar;\n\nimport junit.framework.TestCase;\n\nimport org.springframework.beans.BeanWrapper;\nimport org.springframework.beans.BeanWrapperImpl;\n\npublic class StringToTimeTest extends TestCase {\n\n\tpublic void testMySqlDateFormat() {\n\t\tDate now = new Date();\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.setTime(now);\n\t\t\n\t\tcal.set(Calendar.MONTH, Calendar.OCTOBER);\n\t\tcal.set(Calendar.DATE, 26);\n\t\tcal.set(Calendar.YEAR, 1981);\n\t\tcal.set(Calendar.HOUR_OF_DAY, 15);\n\t\tcal.set(Calendar.MINUTE, 26);\n\t\tcal.set(Calendar.SECOND, 3);\n\t\tcal.set(Calendar.MILLISECOND, 435);\n\t\t\n\t\tassertEquals(new Date(cal.getTimeInMillis()), new StringToTime(\"1981-10-26 15:26:03.435\", now));\n\t}\n\t\n\t/* FIXME\n\tpublic void testISO8601() {\n\t\tDate now = new Date();\n\t\tCalendar cal = 
Calendar.getInstance();\n\t\tcal.setTime(now);\n\t\t\n\t\tcal.set(Calendar.MONTH, Calendar.OCTOBER);\n\t\tcal.set(Calendar.DATE, 26);\n\t\tcal.set(Calendar.YEAR, 1981);\n\t\tcal.set(Calendar.HOUR_OF_DAY, 15);\n\t\tcal.set(Calendar.MINUTE, 25);\n\t\tcal.set(Calendar.SECOND, 2);\n\t\tcal.set(Calendar.MILLISECOND, 435);\n\t\t\n\t\tassertEquals(new Date(cal.getTimeInMillis()), new StringToTime(\"1981-10-26T15:26:03.435ZEST\", now));\n\t}\n\t*/\n\t\n\tpublic void test1200Seconds() {\n\t\tDate now = new Date();\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.setTime(now);\n\t\t\n\t\tcal.set(Calendar.SECOND, cal.get(Calendar.SECOND)+1200);\n\t\tassertTrue(new Date(cal.getTimeInMillis()).equals(new StringToTime(\"+1200 s\", now)));\n\t\tassertFalse(new Date(cal.getTimeInMillis()).equals(new StringToTime(\"+1 s\", now)));\n\t}\n\t\n\tpublic void testVariousExpressionsOfTimeOfDay() {\n\t\tDate now = new Date();\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.setTime(now);\n\t\t\n\t\tcal.set(Calendar.HOUR_OF_DAY, 23);\n\t\tcal.set(Calendar.MINUTE, 59);\n\t\tcal.set(Calendar.SECOND, 59);\n\t\tcal.set(Calendar.MILLISECOND, 0);\n\t\tassertEquals(new Date(cal.getTimeInMillis()), new StringToTime(\"11:59:59 PM\", now));\n\t\tassertEquals(new Date(cal.getTimeInMillis()), new StringToTime(\"23:59:59\", now));\n\t\t\n\t\tcal.set(Calendar.SECOND, 0);\n\t\tassertEquals(new Date(cal.getTimeInMillis()), new StringToTime(\"23:59\", now));\n\t\tassertEquals(new Date(cal.getTimeInMillis()), new StringToTime(\"11:59 PM\", now));\n\t\t\n\t\tcal.set(Calendar.MILLISECOND, 123);\n\t\tassertEquals(new Date(cal.getTimeInMillis()), new StringToTime(\"23:59:00.123\"));\n\t\t\n\t\tcal.set(Calendar.MONTH, Calendar.OCTOBER);\n\t\tcal.set(Calendar.DATE, 26);\n\t\tcal.set(Calendar.YEAR, 1981);\n\t\tcal.set(Calendar.HOUR_OF_DAY, 15);\n\t\tcal.set(Calendar.MINUTE, 27);\n\t\tcal.set(Calendar.SECOND, 0);\n\t\tcal.set(Calendar.MILLISECOND, 0);\n\t\tassertEquals(new Date(cal.getTimeInMillis()), new 
StringToTime(\"October 26, 1981 3:27:00 PM\", now));\n\t\t\n\t\tcal.set(Calendar.HOUR, 5);\n\t\tcal.set(Calendar.MINUTE, 0);\n\t\tcal.set(Calendar.SECOND, 0);\n\t\tcal.set(Calendar.AM_PM, Calendar.PM);\n\t\tassertEquals(new Date(cal.getTimeInMillis()), new StringToTime(\"10/26/81 5PM\", now));\n\t\t\n\t\tcal.setTime(now);\n\t\tcal.set(Calendar.DATE, cal.get(Calendar.DATE)+1);\n\t\tcal.set(Calendar.HOUR, 5);\n\t\tcal.set(Calendar.MINUTE, 0);\n\t\tcal.set(Calendar.SECOND, 0);\n\t\tcal.set(Calendar.MILLISECOND, 0);\n\t\tcal.set(Calendar.AM_PM, Calendar.PM);\n\t\tassertEquals(new Date(cal.getTimeInMillis()), new StringToTime(\"tomorrow 5PM\", now));\n\t\t\n\t\tcal.set(Calendar.DATE, cal.get(Calendar.DATE)-2);\n\t\tassertEquals(new Date(cal.getTimeInMillis()), new StringToTime(\"yesterday 5PM\", now));\n\t\tassertEquals(new StringToTime(\"yesterday evening\", now), new StringToTime(\"yesterday 5PM\", now));\n\t}\n\t\n\tpublic void testStaticMethods() {\n\t\tDate now = new Date();\n\t\t\n\t\t// timestamp\n\t\tLong time = (Long) StringToTime.time(\"now\", now);\n\t\tassertEquals(new Date(now.getTime()), new Date(time));\n\t\t\n\t\t// calendar\n\t\tCalendar cal = (Calendar) StringToTime.cal(\"now\", now);\n\t\tassertEquals(new Date(now.getTime()), new Date(cal.getTimeInMillis()));\n\t\t\n\t\t// date\n\t\tDate date = (Date) StringToTime.date(\"now\", now);\n\t\tassertEquals(new Date(now.getTime()), date);\n\t}\n\n\tpublic void testInstancePattern() {\n\t\tStringToTime date = new StringToTime(\"26 October 1981\");\n\t\tBeanWrapper bean = new BeanWrapperImpl(date);\n\t\tCalendar cal = new GregorianCalendar(1981, Calendar.OCTOBER, 26);\n\t\tLong myBirthday = cal.getTimeInMillis();\n\t\t\n\t\t// string value of the StringToTime object is the timestamp\n\t\tassertEquals(myBirthday, new Long(date.getTime()));\n\t\t\n\t\t// formatting controlled by constructor\n\t\tdate = new StringToTime(\"26 October 1981\", \"d MMM yyyy\");\n\t\tassertEquals(\"26 Oct 1981\", 
date.toString());\n\t\tdate = new StringToTime(\"26 October 1981\", \"M/d/yy\");\n\t\tassertEquals(\"10/26/81\", date.toString());\n\t\t\n\t\t// time property\n\t\tassertEquals(myBirthday, bean.getPropertyValue(\"time\"));\n\t\t\n\t\t// date property\n\t\tDate now = new Date(myBirthday);\n\t\tassertEquals(now, date);\n\t\t\n\t\t// calendar property\n\t\tassertEquals(cal, bean.getPropertyValue(\"cal\"));\n\t\t\n\t\t// format on demand\n\t\tassertEquals(\"October 26, 1981\", date.format(\"MMMM d, yyyy\"));\n\t}\n\t\n\tpublic void testNow() {\n\t\tDate now = new Date();\n\t\tassertEquals(new Date(now.getTime()), new StringToTime(\"now\", now));\n\t}\n\t\n\tpublic void testToday() {\n\t\tDate now = new Date();\n\t\tassertEquals(new StringToTime(\"00:00:00.000\", now), new StringToTime(\"today\", now));\n\t}\n\t\n\tpublic void testThisMorning() {\n\t\tDate now = new Date();\n\t\tassertEquals(new StringToTime(\"07:00:00.000\", now), new StringToTime(\"this morning\", now));\n\t\tassertEquals(new StringToTime(\"morning\", now), new StringToTime(\"this morning\", now));\n\t}\n\t\n\tpublic void testNoon() {\n\t\tDate now = new Date();\n\t\tassertEquals(new StringToTime(\"12:00:00.000\", now), new StringToTime(\"noon\", now));\n\t}\n\t\n\tpublic void testThisAfternoon() {\n\t\tDate now = new Date();\n\t\tassertEquals(new StringToTime(\"13:00:00.000\", now), new StringToTime(\"this afternoon\", now));\n\t\tassertEquals(new StringToTime(\"afternoon\", now), new StringToTime(\"this afternoon\", now));\n\t}\n\t\n\tpublic void testThisEvening() {\n\t\tDate now = new Date();\n\t\tassertEquals(new StringToTime(\"17:00:00.000\", now), new StringToTime(\"this evening\", now));\n\t\tassertEquals(new StringToTime(\"evening\", now), new StringToTime(\"this evening\", now));\n\t}\n\t\n\tpublic void testTonight() {\n\t\tDate now = new Date();\n\t\tassertEquals(StringToTime.time(\"20:00:00.000\", now), StringToTime.time(\"tonight\", now));\n\t}\n\t\n\tpublic void testIncrements() 
{\n\t\tDate now = new Date();\n\t\t\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.setTime(now);\n\t\tcal.set(Calendar.HOUR_OF_DAY, cal.get(Calendar.HOUR_OF_DAY)+1);\n\t\tassertEquals(cal.getTimeInMillis(), StringToTime.time(\"+1 hour\", now));\n\t\t\n\t\tcal.setTime(now);\n\t\tcal.set(Calendar.WEEK_OF_YEAR, cal.get(Calendar.WEEK_OF_YEAR)+52);\n\t\tassertEquals(cal.getTimeInMillis(), StringToTime.time(\"+52 weeks\", now));\n\t\t\n\t\tassertEquals(new StringToTime(\"1 year\", now), new StringToTime(\"+1 year\", now));\n\t\t\n\t\tassertEquals(new StringToTime(\"+1 year\", now), new StringToTime(\"+12 months\", now));\n\t\t\n\t\tassertEquals(new StringToTime(\"+1 year 6 months\", now), new StringToTime(\"+18 months\", now));\n\t\t\n\t\tassertEquals(new StringToTime(\"12 months 1 day 60 seconds\", now), new StringToTime(\"1 year 24 hours 1 minute\", now));\n\t}\n\t\n\tpublic void testDecrements() {\n\t\tDate now = new Date();\n\t\t\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.setTime(now);\n\t\tcal.set(Calendar.HOUR_OF_DAY, cal.get(Calendar.HOUR_OF_DAY)-1);\n\t\tassertEquals(new Date(cal.getTimeInMillis()), new StringToTime(\"-1 hour\", now));\n\t}\n\t\n\tpublic void testTomorrow() {\n\t\tDate now = new Date();\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.setTime(now);\n\t\tcal.set(Calendar.DATE, cal.get(Calendar.DATE)+1);\n\t\tassertEquals(new Date(cal.getTimeInMillis()), new StringToTime(\"tomorrow\", now));\n\t\tassertEquals(new StringToTime(\"now +24 hours\", now), new StringToTime(\"tomorrow\", now));\n\t}\n\t\n\tpublic void testTomorrowMorning() {\n\t\tDate now = new Date();\n\t\tassertEquals(new StringToTime(\"this morning +24 hours\", now), new StringToTime(\"tomorrow morning\", now));\n\t}\n\t\n\tpublic void testTomorrowNoon() {\n\t\tDate now = new Date();\n\t\tassertEquals(new StringToTime(\"noon +24 hours\", now), new StringToTime(\"tomorrow noon\", now));\n\t\tassertEquals(new StringToTime(\"noon +24 hours\", now), new 
StringToTime(\"noon tomorrow\", now));\n\t}\n\t\n\tpublic void testTomorrowAfternoon() {\n\t\tDate now = new Date();\n\t\tassertEquals(new StringToTime(\"this afternoon +24 hours\", now), new StringToTime(\"tomorrow afternoon\", now));\n\t}\n\t\n\tpublic void testTomorrowEvening() {\n\t\tDate now = new Date();\n\t\tassertEquals(new StringToTime(\"this evening +24 hours\", now), new StringToTime(\"tomorrow evening\", now));\n\t}\n\t\n\tpublic void testTomorrowNight() {\n\t\tDate now = new Date();\n\t\tassertEquals(new StringToTime(\"tonight +24 hours\", now), new StringToTime(\"tomorrow night\", now));\n\t}\n\t\n\t// e.g., October 26, 1981, or Oct 26, 1981, or 26 October 1981, or 26 Oct 1981, or 26 Oct 81\n\tpublic void testLongHand() throws Exception {\n\t\tassertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"), new StringToTime(\"October 26, 1981\"));\n\t\tassertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"), new StringToTime(\"Oct 26, 1981\"));\n\t\t\n\t\tassertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"), new StringToTime(\"26 October 1981\"));\n\t\tassertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"), new StringToTime(\"26 Oct 1981\"));\n\t\tassertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"), new StringToTime(\"26 Oct 81\"));\n\t\t\n\t\tassertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"), new StringToTime(\"26 october 1981\"));\n\t\tassertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"), new StringToTime(\"26 oct 1981\"));\n\t\tassertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"), new StringToTime(\"26 oct 81\"));\n\t\t\n\t\tassertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"1/1/2000\"), new StringToTime(\"1 Jan 2000\"));\n\t\tassertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"1/1/2000\"), new StringToTime(\"1 Jan 00\"));\n\t\t\n\t\tassertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"1/1/2000\"), new StringToTime(\"1 jan 
2000\"));\n\t\tassertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"1/1/2000\"), new StringToTime(\"1 jan 00\"));\n\t}\n\t\n\t// e.g., 10/26/1981 or 10/26/81\n\tpublic void testWithSlahesMonthFirst() throws Exception {\n\t\tassertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"), new StringToTime(\"10/26/1981\"));\n\t\tassertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"), new StringToTime(\"10/26/81\"));\n\t}\n\n\t// e.g., 1981/10/26\n\tpublic void testWithSlashesYearFirst() throws Exception {\n\t\tassertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"), new StringToTime(\"1981/10/26\"));\n\t}\n\t\n\t// e.g., October 26 and Oct 26\n\tpublic void testMonthAndDate() throws Exception {\n\t\tDate now = new Date();\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.set(Calendar.MONTH, Calendar.OCTOBER);\n\t\tcal.set(Calendar.DATE, 26);\n\t\tassertEquals(new Date(cal.getTimeInMillis()), new StringToTime(\"October 26\", now));\n\t\tassertEquals(new StringToTime(\"Oct 26\", now), new StringToTime(\"October 26\", now));\n\t}\n\t\n\t// e.g., 10/26\n\tpublic void testWithSlahesMonthAndDate() throws Exception {\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.set(Calendar.MONTH, Calendar.OCTOBER);\n\t\tcal.set(Calendar.DATE, 26);\n\t\tassertEquals(new Date(cal.getTimeInMillis()), new StringToTime(\"10/26\"));\n\t}\n\t\n\t// e.g., October or Oct\n\tpublic void testMonth() throws Exception {\n\t\tDate now = new Date();\n\t\t\n\t\tassertEquals(new StringToTime(\"October\", now), new StringToTime(\"Oct\", now));\n\t\t\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.setTime(now);\n\t\t\n\t\t// it should be this year\n\t\tassertEquals(cal.get(Calendar.YEAR), new StringToTime(\"January\", now).getCal().get(Calendar.YEAR));\n assertEquals(cal.get(Calendar.YEAR), new StringToTime(\"December\", now).getCal().get(Calendar.YEAR));\n\t}\n\t\n\tpublic void testDayOfWeek() throws Exception {\n\t\tDate now = new 
Date();\n\t\tassertEquals(StringToTime.date(\"Friday\", now), StringToTime.date(\"Fri\", now));\n\t\t\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.setTime(now);\n\t\t\n\t\t// if today's day of the week is greater than or equal to our test day of the week (Wednesday)\n\t\tif (cal.get(Calendar.DAY_OF_WEEK) >= 3) // then the day of the week on the date returned should be next week\n\t\t\tassertEquals(cal.get(Calendar.WEEK_OF_YEAR)+1, new StringToTime(\"Wednesday\", now).getCal().get(Calendar.WEEK_OF_YEAR));\n\t\telse // otherwise, it should be this year\n\t\t\tassertEquals(cal.get(Calendar.WEEK_OF_YEAR), new StringToTime(\"Wednesday\", now).getCal().get(Calendar.WEEK_OF_YEAR));\n\t}\n\t\n\tpublic void testNext() {\n\t\tDate now = new Date();\n\t\tassertEquals(new StringToTime(\"next January 15\", now), new StringToTime(\"Jan 15\", now));\n\t\tassertEquals(new StringToTime(\"next Dec\", now), new StringToTime(\"December\", now));\n\t\tassertEquals(new StringToTime(\"next Sunday\", now), new StringToTime(\"Sun\", now));\n\t\tassertEquals(new StringToTime(\"next Sat\", now), new StringToTime(\"Saturday\", now));\n\t}\n\t\n\tpublic void testLast() {\n\t\tDate now = new Date();\n\t\tassertEquals(new StringToTime(\"last January 15\", now), new StringToTime(\"Jan 15 -1 year\", now));\n\t\tassertEquals(new StringToTime(\"last Dec\", now), new StringToTime(\"December -1 year\", now));\n\t\tassertEquals(new StringToTime(\"last Sunday\", now), new StringToTime(\"Sun -1 week\", now));\n\t\tassertEquals(new StringToTime(\"last Sat\", now), new StringToTime(\"Saturday -1 week\", now));\n\t}\n\t\n\t\n\t\n}\n"}, "files_after": {"src/main/java/com/clutch/dates/StringToTime.java": "package com.clutch.dates;\n\nimport java.text.ParseException;\nimport java.text.SimpleDateFormat;\nimport java.util.Calendar;\nimport java.util.Date;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.regex.Matcher;\nimport java.util.regex.Pattern;\n\nimport 
org.joda.time.DateTime;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport ch.qos.logback.classic.Level;\n\nimport com.google.common.base.Preconditions;\nimport com.google.common.base.Strings;\nimport com.google.common.base.Throwables;\n\n/**\n * A Java implementation of the PHP function strtotime(String, int): accepts\n * various expressions of time\n * in String format, and returns a timestamp or {@link DateTime}\n * object.\n * \n *
all the permutations of yesterday and morning,\n * noon, afternoon, and evening
\n *
October 26, 1981 or Oct 26, 1981
\n *
October 26 or Oct 26
\n *
26 October 1981
\n *
26 Oct 1981
\n *
26 Oct 81
\n *
10/26/1981 or 10-26-1981
\n *
10/26/81 or 10-26-81
\n *
1981/10/26 or 1981-10-26
\n *
10/26 or 10-26
\n *
\n * \n * @author Aaron Collegeman acollegeman@clutch-inc.com\n * @since JRE 1.5.0\n * @see http://us3.php.net/manual/en/function.strtotime.php\n */\npublic class StringToTime {\n\n\t/**\n\t * Parse {@code string} and return a {@link DateTime} object.\n\t * \n\t * @param string\n\t * @return the corresponding DateTime\n\t */\n\tpublic static DateTime parseDateTime(String string) {\n\t\treturn new DateTime(parseLong(string));\n\t}\n\n\t/**\n\t * Parse {@code string} and return a timestamp with millisecond precision.\n\t * \n\t * @param string\n\t * @return the corresponding timestamp\n\t */\n\tpublic static long parseLong(String string) {\n\t\treturn parseLong(string, new Date());\n\t}\n\t\n\tprotected static DateTime parseDateTime(String string, Date now){\n\t\treturn new DateTime(parseLong(string, now));\n\t}\n\n\t/**\n\t * Parse {@code string} relative to {@code now} and return a timestamp with\n\t * millisecond precision.\n\t * \n\t * @param string\n\t * @param now\n\t * @return the corresponding timestamp\n\t */\n\tprotected static long parseLong(String string, Date now) {\n\t\tPreconditions.checkArgument(!Strings.isNullOrEmpty(string));\n\t\ttry {\n\t\t\tfor (PatternAndFormat paf : known) {\n\t\t\t\tMatcher m = paf.matches(string);\n\t\t\t\tif(m.matches()) {\n\t\t\t\t\tLong time = paf.parse(string, now, m);\n\t\t\t\t\tlog.debug(String.format(\"{} triggered format {}: {}\",\n\t\t\t\t\t\t\tstring, paf.f, new Date(time)));\n\t\t\t\t\treturn time;\n\t\t\t\t}\n\t\t\t}\n\t\t\tlog.debug(String.format(\"Unrecognized date/time string {}\", string));\n\t\t\tthrow new ParseException(\"Unrecognized date/time string '\" + string\n\t\t\t\t\t+ \"'\", 0);\n\t\t}\n\t\tcatch (Exception e) { // thrown by various features of the parser\n\t\t\tthrow Throwables.propagate(e);\n\t\t}\n\t}\n\n\t/**\n\t * Return the parse result.\n\t * \n\t * @param trimmedDateTimeString\n\t * @param now\n\t * @return\n\t * @throws ParseException\n\t */\n\tprivate static ParserResult 
getParserResult(String trimmedDateTimeString,\n\t\t\tDate now) throws ParseException {\n\t\tfor (PatternAndFormat paf : known) {\n\t\t\tMatcher m = paf.matches(trimmedDateTimeString);\n\t\t\tif(m.matches()) {\n\t\t\t\tlog.debug(String.format(\n\t\t\t\t\t\t\"Date/time string [%s] triggered format [%s]\",\n\t\t\t\t\t\ttrimmedDateTimeString, paf.f));\n\t\t\t\treturn new ParserResult(\n\t\t\t\t\t\tpaf.parse(trimmedDateTimeString, now, m), paf.f.type);\n\t\t\t}\n\t\t}\n\n\t\treturn null;\n\t}\n\n\tprivate static final Logger log = LoggerFactory\n\t\t\t.getLogger(StringToTime.class);\n\tstatic {\n\t\t// Set to Level.DEBUG to see information about string parsing logic\n\t\t((ch.qos.logback.classic.Logger) log).setLevel(Level.INFO);\n\t}\n\n\t// An expression of time (hour)(:(minute))?((:(second))(.(millisecond))?)?(\n\t// *(am?|pm?))?(RFC 822 time zone|general time zone)?\n\tprivate static final String timeExpr = \"(\\\\d{1,2})(:(\\\\d{1,2}))?(:(\\\\d{1,2})(\\\\.(\\\\d{1,3}))?)?( *(am?|pm?))?( *\\\\-\\\\d{4}|[a-z]{3}|[a-z ]+)?\";\n\n\t/**\n\t * Patterns and formats recognized by the algorithm; first match wins, so\n\t * insert most specific patterns first.\n\t */\n\tprivate static final PatternAndFormat[] known = {\n\n\t\t\t// TODO: ISO 8601 and derivatives\n\n\t\t\t// just the year\n\t\t\tnew PatternAndFormat(Pattern.compile(\"\\\\d{4}\"), new Format(\n\t\t\t\t\tFormatType.YEAR)),\n\n\t\t\t// decrement, e.g., -1 day\n\t\t\tnew PatternAndFormat(Pattern.compile(\"\\\\-( *\\\\d{1,} +[^ ]+){1,}\",\n\t\t\t\t\tPattern.CASE_INSENSITIVE), new Format(FormatType.DECREMENT)),\n\n\t\t\t// increment, e.g., +1 day\n\t\t\tnew PatternAndFormat(Pattern.compile(\"\\\\+?( *\\\\d{1,} +[^ ]+){1,}\",\n\t\t\t\t\tPattern.CASE_INSENSITIVE), new Format(FormatType.INCREMENT)),\n\n\t\t\t// e.g., October 26 and Oct 26\n\t\t\tnew PatternAndFormat(Pattern.compile(\"([a-z]+) +(\\\\d{1,2})\",\n\t\t\t\t\tPattern.CASE_INSENSITIVE), new Format(\n\t\t\t\t\tFormatType.MONTH_AND_DATE)),\n\n\t\t\t// e.g., 26 
October 1981, or 26 Oct 1981, or 26 Oct 81\n\t\t\tnew PatternAndFormat(Pattern.compile(\n\t\t\t\t\t\"\\\\d{1,2} +[a-z]+ +(\\\\d{2}|\\\\d{4})\",\n\t\t\t\t\tPattern.CASE_INSENSITIVE), new Format(\"d MMM y\")),\n\n\t\t\t// now or today\n\t\t\tnew PatternAndFormat(\n\t\t\t\t\tPattern.compile(\n\t\t\t\t\t\t\t\"(midnight|now|today|(this +)?(morning|afternoon|evening)|tonight|noon( +tomorrow)?|tomorrow|tomorrow +(morning|afternoon|evening|night|noon)?|yesterday|yesterday +(morning|afternoon|evening|night)?)\",\n\t\t\t\t\t\t\tPattern.CASE_INSENSITIVE), new Format(\n\t\t\t\t\t\t\tFormatType.WORD)),\n\n\t\t\t// time, 24-hour and 12-hour\n\t\t\tnew PatternAndFormat(Pattern.compile(timeExpr,\n\t\t\t\t\tPattern.CASE_INSENSITIVE), new Format(FormatType.TIME)),\n\n\t\t\t// e.g., October 26, 1981 or Oct 26, 1981\n\t\t\tnew PatternAndFormat(Pattern.compile(\n\t\t\t\t\t\"[a-z]+ +\\\\d{1,2} *, *(\\\\d{2}|\\\\d{4})\",\n\t\t\t\t\tPattern.CASE_INSENSITIVE), new Format(\"MMM d, y\")),\n\n\t\t\t// e.g., 10/26/1981 or 10/26/81\n\t\t\tnew PatternAndFormat(Pattern.compile(\"\\\\d{1,2}/\\\\d{1,2}/\\\\d{2,4}\"),\n\t\t\t\t\tnew Format(\"M/d/y\")),\n\n\t\t\t// e.g., 10-26-1981 or 10-26-81\n\t\t\tnew PatternAndFormat(\n\t\t\t\t\tPattern.compile(\"\\\\d{1,2}\\\\-\\\\d{1,2}\\\\-\\\\d{2,4}\"),\n\t\t\t\t\tnew Format(\"M-d-y\")),\n\n\t\t\t// e.g., 10/26 or 10-26\n\t\t\tnew PatternAndFormat(\n\t\t\t\t\tPattern.compile(\"(\\\\d{1,2})(/|\\\\-)(\\\\d{1,2})\"), new Format(\n\t\t\t\t\t\t\tFormatType.MONTH_AND_DATE_WITH_SLASHES)),\n\n\t\t\t// e.g., 1981/10/26\n\t\t\tnew PatternAndFormat(Pattern.compile(\"\\\\d{4}/\\\\d{1,2}/\\\\d{1,2}\"),\n\t\t\t\t\tnew Format(\"y/M/d\")),\n\n\t\t\t// e.g., 1981-10-26\n\t\t\tnew PatternAndFormat(\n\t\t\t\t\tPattern.compile(\"\\\\d{4}\\\\-\\\\d{1,2}\\\\-\\\\d{1,2}\"),\n\t\t\t\t\tnew Format(\"y-M-d\")),\n\n\t\t\t// e.g., October or Oct\n\t\t\tnew 
PatternAndFormat(\n\t\t\t\t\tPattern.compile(\n\t\t\t\t\t\t\t\"(Jan(uary)?|Feb(ruary)?|Mar(ch)?|Apr(il)?|May|Jun(e)?|Jul(y)?|Aug(ust)?|Sep(tember)?|Oct(ober)?|Nov(ember)?|Dec(ember)?)\",\n\t\t\t\t\t\t\tPattern.CASE_INSENSITIVE), new Format(\n\t\t\t\t\t\t\tFormatType.MONTH)),\n\n\t\t\t// e.g., Tuesday or Tue\n\t\t\tnew PatternAndFormat(\n\t\t\t\t\tPattern.compile(\n\t\t\t\t\t\t\t\"(Sun(day)?|Mon(day)?|Tue(sday)?|Wed(nesday)?|Thu(rsday)?|Fri(day)?|Sat(urday)?)\",\n\t\t\t\t\t\t\tPattern.CASE_INSENSITIVE), new Format(\n\t\t\t\t\t\t\tFormatType.DAY_OF_WEEK)),\n\n\t\t\t// next, e.g., next Tuesday\n\t\t\tnew PatternAndFormat(Pattern.compile(\"next +(.*)\",\n\t\t\t\t\tPattern.CASE_INSENSITIVE), new Format(FormatType.NEXT)),\n\n\t\t\t// last, e.g., last Tuesday\n\t\t\tnew PatternAndFormat(Pattern.compile(\"last +(.*)\",\n\t\t\t\t\tPattern.CASE_INSENSITIVE), new Format(FormatType.LAST)),\n\n\t\t\t// compound statement\n\t\t\tnew PatternAndFormat(Pattern.compile(\"(.*) +(((\\\\+|\\\\-){1}.*)|\"\n\t\t\t\t\t+ timeExpr + \")$\", Pattern.CASE_INSENSITIVE), new Format(\n\t\t\t\t\tFormatType.COMPOUND))\n\n\t};\n\n\tprivate static class Format {\n\n\t\tprivate static Pattern unit = Pattern\n\t\t\t\t.compile(\"(\\\\d{1,}) +(s(ec(ond)?)?|mo(n(th)?)?|(hour|hr?)|d(ay)?|(w(eek)?|wk)|m(in(ute)?)?|(y(ear)?|yr))s?\");\n\n\t\tprivate static Pattern removeExtraSpaces = Pattern.compile(\" +\");\n\n\t\tprivate static Map translateDayOfWeek = new HashMap();\n\n\t\tstatic {\n\t\t\ttranslateDayOfWeek.put(\"sunday\", 1);\n\t\t\ttranslateDayOfWeek.put(\"sun\", 1);\n\t\t\ttranslateDayOfWeek.put(\"monday\", 2);\n\t\t\ttranslateDayOfWeek.put(\"mon\", 2);\n\t\t\ttranslateDayOfWeek.put(\"tuesday\", 3);\n\t\t\ttranslateDayOfWeek.put(\"tue\", 3);\n\t\t\ttranslateDayOfWeek.put(\"wednesday\", 4);\n\t\t\ttranslateDayOfWeek.put(\"wed\", 4);\n\t\t\ttranslateDayOfWeek.put(\"thursday\", 5);\n\t\t\ttranslateDayOfWeek.put(\"thu\", 5);\n\t\t\ttranslateDayOfWeek.put(\"friday\", 
6);\n\t\t\ttranslateDayOfWeek.put(\"fri\", 6);\n\t\t\ttranslateDayOfWeek.put(\"saturday\", 7);\n\t\t\ttranslateDayOfWeek.put(\"sat\", 7);\n\t\t}\n\n\t\tprivate String sdf;\n\n\t\tprivate FormatType type;\n\n\t\tpublic Format(FormatType type) {\n\t\t\tthis.type = type;\n\t\t}\n\n\t\tpublic Format(String sdf) {\n\t\t\tthis.sdf = sdf;\n\t\t}\n\n\t\tpublic Date parse(String dateTimeString, Date now, Matcher m)\n\t\t\t\tthrows ParseException {\n\t\t\tif(sdf != null)\n\t\t\t\treturn new SimpleDateFormat(sdf).parse(dateTimeString);\n\t\t\telse {\n\t\t\t\tdateTimeString = removeExtraSpaces.matcher(dateTimeString)\n\t\t\t\t\t\t.replaceAll(\" \").toLowerCase();\n\n\t\t\t\ttry {\n\t\t\t\t\tCalendar cal = Calendar.getInstance();\n\t\t\t\t\tcal.setTime(now);\n\n\t\t\t\t\t// word expressions, e.g., \"now\" and \"today\" and \"tonight\"\n\t\t\t\t\tif(type == FormatType.WORD) {\n\t\t\t\t\t\tif(\"now\".equalsIgnoreCase(dateTimeString))\n\t\t\t\t\t\t\treturn (now != null ? now : new Date());\n\n\t\t\t\t\t\telse if(\"today\".equalsIgnoreCase(dateTimeString)) {\n\t\t\t\t\t\t\tcal.set(Calendar.HOUR_OF_DAY, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.MINUTE, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.SECOND, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.MILLISECOND, 0);\n\t\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse if(\"morning\".equalsIgnoreCase(dateTimeString)\n\t\t\t\t\t\t\t\t|| \"this morning\"\n\t\t\t\t\t\t\t\t\t\t.equalsIgnoreCase(dateTimeString)) {\n\t\t\t\t\t\t\t// by default, this morning begins at 07:00:00.000\n\t\t\t\t\t\t\tint thisMorningBeginsAt = Integer.parseInt(System\n\t\t\t\t\t\t\t\t\t.getProperty(StringToTime.class\n\t\t\t\t\t\t\t\t\t\t\t+ \".THIS_MORNING_BEGINS_AT\", \"7\"));\n\t\t\t\t\t\t\tcal.set(Calendar.HOUR_OF_DAY, thisMorningBeginsAt);\n\t\t\t\t\t\t\tcal.set(Calendar.MINUTE, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.SECOND, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.MILLISECOND, 0);\n\t\t\t\t\t\t\treturn new 
Date(cal.getTimeInMillis());\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse if(\"noon\".equalsIgnoreCase(dateTimeString)) {\n\t\t\t\t\t\t\t// noon is 12:00:00.000\n\t\t\t\t\t\t\tcal.set(Calendar.HOUR_OF_DAY, 12);\n\t\t\t\t\t\t\tcal.set(Calendar.MINUTE, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.SECOND, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.MILLISECOND, 0);\n\t\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse if(\"afternoon\".equalsIgnoreCase(dateTimeString)\n\t\t\t\t\t\t\t\t|| \"this afternoon\"\n\t\t\t\t\t\t\t\t\t\t.equalsIgnoreCase(dateTimeString)) {\n\t\t\t\t\t\t\t// by default, this afternoon begins at 13:00:00.000\n\t\t\t\t\t\t\tint thisAfternoonBeginsAt = Integer\n\t\t\t\t\t\t\t\t\t.parseInt(System\n\t\t\t\t\t\t\t\t\t\t\t.getProperty(\n\t\t\t\t\t\t\t\t\t\t\t\t\tStringToTime.class\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t+ \".THIS_AFTERNOON_BEGINS_AT\",\n\t\t\t\t\t\t\t\t\t\t\t\t\t\"13\"));\n\t\t\t\t\t\t\tcal.set(Calendar.HOUR_OF_DAY, thisAfternoonBeginsAt);\n\t\t\t\t\t\t\tcal.set(Calendar.MINUTE, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.SECOND, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.MILLISECOND, 0);\n\t\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse if(\"evening\".equalsIgnoreCase(dateTimeString)\n\t\t\t\t\t\t\t\t|| \"this evening\"\n\t\t\t\t\t\t\t\t\t\t.equalsIgnoreCase(dateTimeString)) {\n\t\t\t\t\t\t\t// by default, this evening begins at 17:00:00.000\n\t\t\t\t\t\t\tint thisEveningBeginsAt = Integer.parseInt(System\n\t\t\t\t\t\t\t\t\t.getProperty(StringToTime.class\n\t\t\t\t\t\t\t\t\t\t\t+ \".THIS_EVENING_BEGINS_AT\", \"17\"));\n\t\t\t\t\t\t\tcal.set(Calendar.HOUR_OF_DAY, thisEveningBeginsAt);\n\t\t\t\t\t\t\tcal.set(Calendar.MINUTE, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.SECOND, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.MILLISECOND, 0);\n\t\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse if(\"tonight\".equalsIgnoreCase(dateTimeString)) {\n\t\t\t\t\t\t\t// by default, tonight begins at 
20:00:00.000\n\t\t\t\t\t\t\tint tonightBeginsAt = Integer.parseInt(System\n\t\t\t\t\t\t\t\t\t.getProperty(StringToTime.class\n\t\t\t\t\t\t\t\t\t\t\t+ \".TONIGHT_BEGINS_AT\", \"20\"));\n\t\t\t\t\t\t\tcal.set(Calendar.HOUR_OF_DAY, tonightBeginsAt);\n\t\t\t\t\t\t\tcal.set(Calendar.MINUTE, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.SECOND, 0);\n\t\t\t\t\t\t\tcal.set(Calendar.MILLISECOND, 0);\n\t\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse if(\"midnight\".equalsIgnoreCase(dateTimeString)) {\n\t\t\t\t\t\t\treturn new Date(\n\t\t\t\t\t\t\t\t\tparseLong(\"00:00:00 +24 hours\", now));\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse if(\"tomorrow\".equalsIgnoreCase(dateTimeString)) {\n\t\t\t\t\t\t\treturn new Date(parseLong(\"now +24 hours\", now));\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse if(\"tomorrow morning\"\n\t\t\t\t\t\t\t\t.equalsIgnoreCase(dateTimeString)) {\n\t\t\t\t\t\t\treturn new Date(parseLong(\"morning +24 hours\", now));\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse if(\"tomorrow noon\"\n\t\t\t\t\t\t\t\t.equalsIgnoreCase(dateTimeString)\n\t\t\t\t\t\t\t\t|| \"noon tomorrow\"\n\t\t\t\t\t\t\t\t\t\t.equalsIgnoreCase(dateTimeString)) {\n\t\t\t\t\t\t\treturn new Date(parseLong(\"noon +24 hours\", now));\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse if(\"tomorrow afternoon\"\n\t\t\t\t\t\t\t\t.equalsIgnoreCase(dateTimeString)) {\n\t\t\t\t\t\t\treturn new Date(parseLong(\"afternoon +24 hours\",\n\t\t\t\t\t\t\t\t\tnow));\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse if(\"tomorrow evening\"\n\t\t\t\t\t\t\t\t.equalsIgnoreCase(dateTimeString)) {\n\t\t\t\t\t\t\treturn new Date(parseLong(\"evening +24 hours\", now));\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse if(\"tomorrow night\"\n\t\t\t\t\t\t\t\t.equalsIgnoreCase(dateTimeString)) {\n\t\t\t\t\t\t\treturn new Date(parseLong(\"tonight +24 hours\", now));\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse if(\"yesterday\".equalsIgnoreCase(dateTimeString)) {\n\t\t\t\t\t\t\treturn new Date(parseLong(\"now -24 hours\", now));\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse 
if(\"yesterday morning\"\n\t\t\t\t\t\t\t\t.equalsIgnoreCase(dateTimeString)) {\n\t\t\t\t\t\t\treturn new Date(parseLong(\"morning -24 hours\", now));\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse if(\"yesterday noon\"\n\t\t\t\t\t\t\t\t.equalsIgnoreCase(dateTimeString)\n\t\t\t\t\t\t\t\t|| \"noon yesterday\"\n\t\t\t\t\t\t\t\t\t\t.equalsIgnoreCase(dateTimeString)) {\n\t\t\t\t\t\t\treturn new Date(parseLong(\"noon -24 hours\", now));\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse if(\"yesterday afternoon\"\n\t\t\t\t\t\t\t\t.equalsIgnoreCase(dateTimeString)) {\n\t\t\t\t\t\t\treturn new Date(parseLong(\"afternoon -24 hours\",\n\t\t\t\t\t\t\t\t\tnow));\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse if(\"yesterday evening\"\n\t\t\t\t\t\t\t\t.equalsIgnoreCase(dateTimeString)) {\n\t\t\t\t\t\t\treturn new Date(parseLong(\"evening -24 hours\", now));\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse if(\"yesterday night\"\n\t\t\t\t\t\t\t\t.equalsIgnoreCase(dateTimeString)) {\n\t\t\t\t\t\t\treturn new Date(parseLong(\"tonight -24 hours\", now));\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\tthrow new ParseException(String.format(\n\t\t\t\t\t\t\t\t\t\"Unrecognized date word: %s\",\n\t\t\t\t\t\t\t\t\tdateTimeString), 0);\n\t\t\t\t\t}\n\n\t\t\t\t\t// time expressions, 24-hour and 12-hour\n\t\t\t\t\telse if(type == FormatType.TIME) {\n\t\t\t\t\t\t// An expression of time\n\t\t\t\t\t\t// (hour)(:(minute))?((:(second))(.(millisecond))?)?(\n\t\t\t\t\t\t// *(am?|pm?))?(RFC 822 time zone|general time zone)?\n\t\t\t\t\t\tString hour = m.group(1);\n\t\t\t\t\t\tString min = m.group(3);\n\t\t\t\t\t\tString sec = m.group(5);\n\t\t\t\t\t\tString ms = m.group(7);\n\t\t\t\t\t\tString amOrPm = m.group(8);\n\n\t\t\t\t\t\tif(hour != null) {\n\t\t\t\t\t\t\tif(amOrPm != null)\n\t\t\t\t\t\t\t\tcal.set(Calendar.HOUR, new Integer(hour));\n\t\t\t\t\t\t\telse\n\t\t\t\t\t\t\t\tcal.set(Calendar.HOUR_OF_DAY, new Integer(hour));\n\t\t\t\t\t\t}\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\tcal.set(Calendar.HOUR, 
0);\n\n\t\t\t\t\t\tcal.set(Calendar.MINUTE,\n\t\t\t\t\t\t\t\t(min != null ? new Integer(min) : 0));\n\t\t\t\t\t\tcal.set(Calendar.SECOND,\n\t\t\t\t\t\t\t\t(sec != null ? new Integer(sec) : 0));\n\t\t\t\t\t\tcal.set(Calendar.MILLISECOND,\n\t\t\t\t\t\t\t\t(ms != null ? new Integer(ms) : 0));\n\n\t\t\t\t\t\tif(amOrPm != null)\n\t\t\t\t\t\t\tcal.set(Calendar.AM_PM,\n\t\t\t\t\t\t\t\t\t(amOrPm.equalsIgnoreCase(\"a\")\n\t\t\t\t\t\t\t\t\t\t\t|| amOrPm.equalsIgnoreCase(\"am\") ? Calendar.AM\n\t\t\t\t\t\t\t\t\t\t\t: Calendar.PM));\n\n\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t}\n\n\t\t\t\t\t// increments\n\t\t\t\t\telse if(type == FormatType.INCREMENT\n\t\t\t\t\t\t\t|| type == FormatType.DECREMENT) {\n\t\t\t\t\t\tMatcher units = unit.matcher(dateTimeString);\n\t\t\t\t\t\twhile (units.find()) {\n\t\t\t\t\t\t\tInteger val = new Integer(units.group(1))\n\t\t\t\t\t\t\t\t\t* (type == FormatType.DECREMENT ? -1 : 1);\n\t\t\t\t\t\t\tString u = units.group(2);\n\n\t\t\t\t\t\t\t// second\n\t\t\t\t\t\t\tif(\"s\".equalsIgnoreCase(u)\n\t\t\t\t\t\t\t\t\t|| \"sec\".equalsIgnoreCase(u)\n\t\t\t\t\t\t\t\t\t|| \"second\".equalsIgnoreCase(u))\n\t\t\t\t\t\t\t\tcal.set(Calendar.SECOND,\n\t\t\t\t\t\t\t\t\t\tcal.get(Calendar.SECOND) + val);\n\n\t\t\t\t\t\t\t// minute\n\t\t\t\t\t\t\telse if(\"m\".equalsIgnoreCase(u)\n\t\t\t\t\t\t\t\t\t|| \"min\".equalsIgnoreCase(u)\n\t\t\t\t\t\t\t\t\t|| \"minute\".equalsIgnoreCase(u))\n\t\t\t\t\t\t\t\tcal.set(Calendar.MINUTE,\n\t\t\t\t\t\t\t\t\t\tcal.get(Calendar.MINUTE) + val);\n\n\t\t\t\t\t\t\t// hour\n\t\t\t\t\t\t\telse if(\"h\".equalsIgnoreCase(u)\n\t\t\t\t\t\t\t\t\t|| \"hr\".equalsIgnoreCase(u)\n\t\t\t\t\t\t\t\t\t|| \"hour\".equalsIgnoreCase(u))\n\t\t\t\t\t\t\t\tcal.set(Calendar.HOUR_OF_DAY,\n\t\t\t\t\t\t\t\t\t\tcal.get(Calendar.HOUR_OF_DAY) + val);\n\n\t\t\t\t\t\t\t// day\n\t\t\t\t\t\t\telse if(\"d\".equalsIgnoreCase(u)\n\t\t\t\t\t\t\t\t\t|| \"day\".equalsIgnoreCase(u))\n\t\t\t\t\t\t\t\tcal.set(Calendar.DATE, 
cal.get(Calendar.DATE)\n\t\t\t\t\t\t\t\t\t\t+ val);\n\n\t\t\t\t\t\t\t// week\n\t\t\t\t\t\t\telse if(\"w\".equalsIgnoreCase(u)\n\t\t\t\t\t\t\t\t\t|| \"wk\".equalsIgnoreCase(u)\n\t\t\t\t\t\t\t\t\t|| \"week\".equalsIgnoreCase(u))\n\t\t\t\t\t\t\t\tcal.set(Calendar.WEEK_OF_YEAR,\n\t\t\t\t\t\t\t\t\t\tcal.get(Calendar.WEEK_OF_YEAR) + val);\n\n\t\t\t\t\t\t\t// month\n\t\t\t\t\t\t\telse if(\"mo\".equalsIgnoreCase(u)\n\t\t\t\t\t\t\t\t\t|| \"mon\".equalsIgnoreCase(u)\n\t\t\t\t\t\t\t\t\t|| \"month\".equalsIgnoreCase(u))\n\t\t\t\t\t\t\t\tcal.set(Calendar.MONTH, cal.get(Calendar.MONTH)\n\t\t\t\t\t\t\t\t\t\t+ val);\n\n\t\t\t\t\t\t\t// year\n\t\t\t\t\t\t\telse if(\"y\".equalsIgnoreCase(u)\n\t\t\t\t\t\t\t\t\t|| \"yr\".equalsIgnoreCase(u)\n\t\t\t\t\t\t\t\t\t|| \"year\".equalsIgnoreCase(u))\n\t\t\t\t\t\t\t\tcal.set(Calendar.YEAR, cal.get(Calendar.YEAR)\n\t\t\t\t\t\t\t\t\t\t+ val);\n\n\t\t\t\t\t\t\telse\n\t\t\t\t\t\t\t\tthrow new IllegalArgumentException(\n\t\t\t\t\t\t\t\t\t\tString.format(\n\t\t\t\t\t\t\t\t\t\t\t\t\"Unrecognized %s unit: [%s]\",\n\t\t\t\t\t\t\t\t\t\t\t\ttype, u));\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t}\n\n\t\t\t\t\t// compound expressions\n\t\t\t\t\telse if(type == FormatType.COMPOUND) {\n\t\t\t\t\t\treturn new Date(parseLong(m.group(2), new Date(\n\t\t\t\t\t\t\t\tparseLong(m.group(1), now))));\n\t\t\t\t\t}\n\n\t\t\t\t\t// month of the year\n\t\t\t\t\telse if(type == FormatType.MONTH) {\n\t\t\t\t\t\tCalendar ref = Calendar.getInstance();\n\t\t\t\t\t\tref.setTime(new SimpleDateFormat(\"MMM d, y\")\n\t\t\t\t\t\t\t\t.parse(String.format(\"%s 1, 1970\", m.group(1))));\n\t\t\t\t\t\tcal.set(Calendar.MONTH, ref.get(Calendar.MONTH));\n\n\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t}\n\n\t\t\t\t\t// day of week\n\t\t\t\t\telse if(type == FormatType.DAY_OF_WEEK) {\n\t\t\t\t\t\tInteger ref = translateDayOfWeek.get(dateTimeString);\n\t\t\t\t\t\tif(cal.get(Calendar.DAY_OF_WEEK) >= 
ref)\n\t\t\t\t\t\t\tcal.set(Calendar.WEEK_OF_YEAR,\n\t\t\t\t\t\t\t\t\tcal.get(Calendar.WEEK_OF_YEAR) + 1);\n\t\t\t\t\t\tcal.set(Calendar.DAY_OF_WEEK, ref);\n\n\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t}\n\n\t\t\t\t\t// month and day with slashes\n\t\t\t\t\telse if(type == FormatType.MONTH_AND_DATE_WITH_SLASHES) {\n\t\t\t\t\t\tCalendar ref = Calendar.getInstance();\n\t\t\t\t\t\tref.setTime(new SimpleDateFormat(\"M/d/y\").parse(String\n\t\t\t\t\t\t\t\t.format(\"%s/%s/1970\", m.group(1), m.group(3))));\n\t\t\t\t\t\tcal.set(Calendar.MONTH, ref.get(Calendar.MONTH));\n\t\t\t\t\t\tcal.set(Calendar.DATE, ref.get(Calendar.DATE));\n\n\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t}\n\n\t\t\t\t\t// month and day long-hand\n\t\t\t\t\telse if(type == FormatType.MONTH_AND_DATE) {\n\t\t\t\t\t\tCalendar ref = Calendar.getInstance();\n\t\t\t\t\t\tref.setTime(new SimpleDateFormat(\"MMM d, y\")\n\t\t\t\t\t\t\t\t.parse(String.format(\"%s %s, 1970\", m.group(1),\n\t\t\t\t\t\t\t\t\t\tm.group(2))));\n\t\t\t\t\t\tcal.set(Calendar.MONTH, ref.get(Calendar.MONTH));\n\t\t\t\t\t\tcal.set(Calendar.DATE, ref.get(Calendar.DATE));\n\n\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t}\n\n\t\t\t\t\t// next X\n\t\t\t\t\telse if(type == FormatType.NEXT) {\n\t\t\t\t\t\t// Format types MONTH and DAY_OF_WEEK both return future\n\t\t\t\t\t\t// dates, so no additional processing is needed\n\t\t\t\t\t\tString expr = m.group(1);\n\t\t\t\t\t\tParserResult parsed = StringToTime.getParserResult(\n\t\t\t\t\t\t\t\texpr, now);\n\n\t\t\t\t\t\tif(parsed != null\n\t\t\t\t\t\t\t\t&& (FormatType.MONTH.equals(parsed.type)\n\t\t\t\t\t\t\t\t\t\t|| FormatType.DAY_OF_WEEK\n\t\t\t\t\t\t\t\t\t\t\t\t.equals(parsed.type) || FormatType.MONTH_AND_DATE\n\t\t\t\t\t\t\t\t\t\t\t.equals(parsed.type)))\n\t\t\t\t\t\t\treturn new Date(parsed.timestamp);\n\t\t\t\t\t\telse 
{\n\t\t\t\t\t\t\tif(\"week\".equalsIgnoreCase(expr))\n\t\t\t\t\t\t\t\tcal.set(Calendar.WEEK_OF_YEAR,\n\t\t\t\t\t\t\t\t\t\tcal.get(Calendar.WEEK_OF_YEAR) + 1);\n\t\t\t\t\t\t\telse if(\"month\".equalsIgnoreCase(expr))\n\t\t\t\t\t\t\t\tcal.set(Calendar.MONTH,\n\t\t\t\t\t\t\t\t\t\tcal.get(Calendar.MONTH) + 1);\n\t\t\t\t\t\t\telse if(\"year\".equalsIgnoreCase(expr))\n\t\t\t\t\t\t\t\tcal.set(Calendar.YEAR,\n\t\t\t\t\t\t\t\t\t\tcal.get(Calendar.YEAR) + 1);\n\t\t\t\t\t\t\telse\n\t\t\t\t\t\t\t\tthrow new IllegalArgumentException(\n\t\t\t\t\t\t\t\t\t\tString.format(\n\t\t\t\t\t\t\t\t\t\t\t\t\"Invalid expression of time: %s\",\n\t\t\t\t\t\t\t\t\t\t\t\tdateTimeString));\n\n\t\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\t// last X\n\t\t\t\t\telse if(type == FormatType.LAST) {\n\t\t\t\t\t\tString expr = m.group(1);\n\t\t\t\t\t\tParserResult parsed = StringToTime.getParserResult(\n\t\t\t\t\t\t\t\texpr, now);\n\n\t\t\t\t\t\tif(parsed != null\n\t\t\t\t\t\t\t\t&& (FormatType.MONTH.equals(parsed.type) || FormatType.MONTH_AND_DATE\n\t\t\t\t\t\t\t\t\t\t.equals(parsed.type))) {\n\t\t\t\t\t\t\treturn new Date(parseLong(\"-1 year\", new Date(\n\t\t\t\t\t\t\t\t\tparsed.timestamp)));\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse if(parsed != null\n\t\t\t\t\t\t\t\t&& FormatType.DAY_OF_WEEK.equals(parsed.type)) {\n\t\t\t\t\t\t\treturn new Date(parseLong(\"-1 week\", new Date(\n\t\t\t\t\t\t\t\t\tparsed.timestamp)));\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\telse {\n\t\t\t\t\t\t\tif(\"week\".equalsIgnoreCase(expr))\n\t\t\t\t\t\t\t\tcal.set(Calendar.WEEK_OF_YEAR,\n\t\t\t\t\t\t\t\t\t\tcal.get(Calendar.WEEK_OF_YEAR) - 1);\n\t\t\t\t\t\t\telse if(\"month\".equalsIgnoreCase(expr))\n\t\t\t\t\t\t\t\tcal.set(Calendar.MONTH,\n\t\t\t\t\t\t\t\t\t\tcal.get(Calendar.MONTH) - 1);\n\t\t\t\t\t\t\telse if(\"year\".equalsIgnoreCase(expr))\n\t\t\t\t\t\t\t\tcal.set(Calendar.YEAR,\n\t\t\t\t\t\t\t\t\t\tcal.get(Calendar.YEAR) - 1);\n\t\t\t\t\t\t\telse\n\t\t\t\t\t\t\t\tthrow new 
IllegalArgumentException(\n\t\t\t\t\t\t\t\t\t\tString.format(\n\t\t\t\t\t\t\t\t\t\t\t\t\"Invalid expression of time: %s\",\n\t\t\t\t\t\t\t\t\t\t\t\tdateTimeString));\n\n\t\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\t// year\n\t\t\t\t\telse if(type == FormatType.YEAR) {\n\t\t\t\t\t\tcal.set(Calendar.YEAR, new Integer(m.group(0)));\n\t\t\t\t\t\treturn new Date(cal.getTimeInMillis());\n\t\t\t\t\t}\n\n\t\t\t\t\t// unimplemented format type\n\t\t\t\t\telse\n\t\t\t\t\t\tthrow new IllegalStateException(String.format(\n\t\t\t\t\t\t\t\t\"Unimplemented FormatType: %s\", type));\n\t\t\t\t}\n\t\t\t\tcatch (ParseException e) {\n\t\t\t\t\tthrow e;\n\t\t\t\t}\n\t\t\t\tcatch (IllegalStateException e) {\n\t\t\t\t\tthrow e;\n\t\t\t\t}\n\t\t\t\tcatch (IllegalArgumentException e) {\n\t\t\t\t\tthrow e;\n\t\t\t\t}\n\t\t\t\tcatch (Exception e) {\n\t\t\t\t\tthrow new RuntimeException(String.format(\n\t\t\t\t\t\t\t\"Unknown failure in string-to-time conversion: %s\",\n\t\t\t\t\t\t\te.getMessage()), e);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t@Override\n\t\tpublic String toString() {\n\t\t\tif(sdf != null)\n\t\t\t\treturn sdf;\n\t\t\telse\n\t\t\t\treturn type.toString();\n\t\t}\n\t}\n\n\tprivate enum FormatType {\n\t\tCOMPOUND,\n\t\tMONTH_AND_DATE_WITH_SLASHES,\n\t\tMONTH_AND_DATE,\n\t\tMONTH,\n\t\tDAY_OF_WEEK,\n\t\tNEXT,\n\t\tLAST,\n\t\tINCREMENT,\n\t\tDECREMENT,\n\t\tWORD,\n\t\tTIME,\n\t\tYEAR\n\t}\n\n\tprivate static class ParserResult {\n\t\tpublic FormatType type;\n\t\tpublic Long timestamp;\n\n\t\tpublic ParserResult(Long timestamp, FormatType type) {\n\t\t\tthis.timestamp = timestamp;\n\t\t\tthis.type = type;\n\t\t}\n\t}\n\n\tprivate static class PatternAndFormat {\n\t\tpublic Pattern p;\n\t\tpublic Format f;\n\n\t\tpublic PatternAndFormat(Pattern p, Format f) {\n\t\t\tthis.p = p;\n\t\t\tthis.f = f;\n\t\t}\n\n\t\tpublic Matcher matches(String dateTimeString) {\n\t\t\treturn p.matcher(dateTimeString);\n\t\t}\n\n\t\tpublic Long parse(String 
dateTimeString, Date now, Matcher m)\n\t\t\t\tthrows ParseException {\n\t\t\treturn f.parse(dateTimeString, now, m).getTime();\n\t\t}\n\t}\n\n}\n", "src/test/java/com/clutch/dates/StringToTimeTest.java": "package com.clutch.dates;\n\nimport java.text.SimpleDateFormat;\nimport java.util.Calendar;\nimport java.util.Date;\n\nimport org.junit.Assert;\nimport org.junit.Test;\n\npublic class StringToTimeTest {\n\n\t@Test\n\tpublic void testMySqlDateFormat() {\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.set(Calendar.MONTH, Calendar.OCTOBER);\n\t\tcal.set(Calendar.DATE, 26);\n\t\tcal.set(Calendar.YEAR, 1981);\n\t\tcal.set(Calendar.HOUR_OF_DAY, 15);\n\t\tcal.set(Calendar.MINUTE, 26);\n\t\tcal.set(Calendar.SECOND, 3);\n\t\tcal.set(Calendar.MILLISECOND, 435);\n\n\t\tAssert.assertEquals(new Date(cal.getTimeInMillis()), StringToTime\n\t\t\t\t.parseDateTime(\"1981-10-26 15:26:03.435\").toDate());\n\t}\n\n\t/*\n\t * FIXME\n\t * \n\t * @Test public void testISO8601() {\n\t * Date now = new Date();\n\t * Calendar cal = Calendar.getInstance();\n\t * cal.setTime(now);\n\t * \n\t * cal.set(Calendar.MONTH, Calendar.OCTOBER);\n\t * cal.set(Calendar.DATE, 26);\n\t * cal.set(Calendar.YEAR, 1981);\n\t * cal.set(Calendar.HOUR_OF_DAY, 15);\n\t * cal.set(Calendar.MINUTE, 25);\n\t * cal.set(Calendar.SECOND, 2);\n\t * cal.set(Calendar.MILLISECOND, 435);\n\t * \n\t * Assert.assertEquals(new Date(cal.getTimeInMillis()), new\n\t * StringToTime(\"1981-10-26T15:26:03.435ZEST\", now).toDate());\n\t * }\n\t */\n\n\t@Test\n\tpublic void test1200Seconds() {\n\t\tDate now = new Date();\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.setTime(now);\n\n\t\tcal.set(Calendar.SECOND, cal.get(Calendar.SECOND) + 1200);\n\t\tAssert.assertTrue(new Date(cal.getTimeInMillis()).equals(StringToTime\n\t\t\t\t.parseDateTime(\"+1200 s\", now).toDate()));\n\t\tAssert.assertFalse(new Date(cal.getTimeInMillis()).equals(StringToTime\n\t\t\t\t.parseDateTime(\"+1 s\", now).toDate()));\n\t}\n\n\t@Test\n\tpublic 
void testVariousExpressionsOfTimeOfDay() {\n\t\tDate now = new Date();\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.setTime(now);\n\n\t\tcal.set(Calendar.HOUR_OF_DAY, 23);\n\t\tcal.set(Calendar.MINUTE, 59);\n\t\tcal.set(Calendar.SECOND, 59);\n\t\tcal.set(Calendar.MILLISECOND, 0);\n\t\tAssert.assertEquals(new Date(cal.getTimeInMillis()), StringToTime\n\t\t\t\t.parseDateTime(\"11:59:59 PM\", now).toDate());\n\t\tAssert.assertEquals(new Date(cal.getTimeInMillis()), StringToTime\n\t\t\t\t.parseDateTime(\"23:59:59\", now).toDate());\n\n\t\tcal.set(Calendar.SECOND, 0);\n\t\tAssert.assertEquals(new Date(cal.getTimeInMillis()), StringToTime\n\t\t\t\t.parseDateTime(\"23:59\", now).toDate());\n\t\tAssert.assertEquals(new Date(cal.getTimeInMillis()), StringToTime\n\t\t\t\t.parseDateTime(\"11:59 PM\", now).toDate());\n\n\t\tcal.set(Calendar.MILLISECOND, 123);\n\t\tAssert.assertEquals(new Date(cal.getTimeInMillis()),\n\t\t\t\tStringToTime.parseDateTime(\"23:59:00.123\"));\n\n\t\tcal.set(Calendar.MONTH, Calendar.OCTOBER);\n\t\tcal.set(Calendar.DATE, 26);\n\t\tcal.set(Calendar.YEAR, 1981);\n\t\tcal.set(Calendar.HOUR_OF_DAY, 15);\n\t\tcal.set(Calendar.MINUTE, 27);\n\t\tcal.set(Calendar.SECOND, 0);\n\t\tcal.set(Calendar.MILLISECOND, 0);\n\t\tAssert.assertEquals(new Date(cal.getTimeInMillis()), StringToTime\n\t\t\t\t.parseDateTime(\"October 26, 1981 3:27:00 PM\", now).toDate());\n\n\t\tcal.set(Calendar.HOUR, 5);\n\t\tcal.set(Calendar.MINUTE, 0);\n\t\tcal.set(Calendar.SECOND, 0);\n\t\tcal.set(Calendar.AM_PM, Calendar.PM);\n\t\tAssert.assertEquals(new Date(cal.getTimeInMillis()), StringToTime\n\t\t\t\t.parseDateTime(\"10/26/81 5PM\", now).toDate());\n\n\t\tcal.setTime(now);\n\t\tcal.set(Calendar.DATE, cal.get(Calendar.DATE) + 1);\n\t\tcal.set(Calendar.HOUR, 5);\n\t\tcal.set(Calendar.MINUTE, 0);\n\t\tcal.set(Calendar.SECOND, 0);\n\t\tcal.set(Calendar.MILLISECOND, 0);\n\t\tcal.set(Calendar.AM_PM, Calendar.PM);\n\t\tAssert.assertEquals(new Date(cal.getTimeInMillis()), 
StringToTime\n\t\t\t\t.parseDateTime(\"tomorrow 5PM\", now).toDate());\n\n\t\tcal.set(Calendar.DATE, cal.get(Calendar.DATE) - 2);\n\t\tAssert.assertEquals(new Date(cal.getTimeInMillis()), StringToTime\n\t\t\t\t.parseDateTime(\"yesterday 5PM\", now).toDate());\n\t\tAssert.assertEquals(StringToTime\n\t\t\t\t.parseDateTime(\"yesterday evening\", now).toDate(), StringToTime\n\t\t\t\t.parseDateTime(\"yesterday 5PM\", now).toDate());\n\t}\n\n\t@Test\n\tpublic void testNow() {\n\t\tDate now = new Date();\n\t\tAssert.assertEquals(new Date(now.getTime()), StringToTime\n\t\t\t\t.parseDateTime(\"now\", now).toDate());\n\t}\n\n\t@Test\n\tpublic void testToday() {\n\t\tDate now = new Date();\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"00:00:00.000\", now)\n\t\t\t\t.toDate(), StringToTime.parseDateTime(\"today\", now).toDate());\n\t}\n\n\t@Test\n\tpublic void testThisMorning() {\n\t\tDate now = new Date();\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"07:00:00.000\", now)\n\t\t\t\t.toDate(), StringToTime.parseDateTime(\"this morning\", now)\n\t\t\t\t.toDate());\n\t\tAssert.assertEquals(\n\t\t\t\tStringToTime.parseDateTime(\"morning\", now).toDate(),\n\t\t\t\tStringToTime.parseDateTime(\"this morning\", now).toDate());\n\t}\n\n\t@Test\n\tpublic void testNoon() {\n\t\tDate now = new Date();\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"12:00:00.000\", now)\n\t\t\t\t.toDate(), StringToTime.parseDateTime(\"noon\", now).toDate());\n\t}\n\n\t@Test\n\tpublic void testThisAfternoon() {\n\t\tDate now = new Date();\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"13:00:00.000\", now)\n\t\t\t\t.toDate(), StringToTime.parseDateTime(\"this afternoon\", now)\n\t\t\t\t.toDate());\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"afternoon\", now)\n\t\t\t\t.toDate(), StringToTime.parseDateTime(\"this afternoon\", now)\n\t\t\t\t.toDate());\n\t}\n\n\t@Test\n\tpublic void testThisEvening() {\n\t\tDate now = new 
Date();\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"17:00:00.000\", now)\n\t\t\t\t.toDate(), StringToTime.parseDateTime(\"this evening\", now)\n\t\t\t\t.toDate());\n\t\tAssert.assertEquals(\n\t\t\t\tStringToTime.parseDateTime(\"evening\", now).toDate(),\n\t\t\t\tStringToTime.parseDateTime(\"this evening\", now).toDate());\n\t}\n\n\t@Test\n\tpublic void testTonight() {\n\t\tDate now = new Date();\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"20:00:00.000\", now)\n\t\t\t\t.toDate(), StringToTime.parseDateTime(\"tonight\", now).toDate());\n\t}\n\n\t@Test\n\tpublic void testIncrements() {\n\t\tDate now = new Date();\n\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.setTime(now);\n\t\tcal.set(Calendar.HOUR_OF_DAY, cal.get(Calendar.HOUR_OF_DAY) + 1);\n\t\tAssert.assertEquals(cal.getTimeInMillis(),\n\t\t\t\tStringToTime.parseDateTime(\"+1 hour\", now).toDate());\n\n\t\tcal.setTime(now);\n\t\tcal.set(Calendar.WEEK_OF_YEAR, cal.get(Calendar.WEEK_OF_YEAR) + 52);\n\t\tAssert.assertEquals(cal.getTimeInMillis(),\n\t\t\t\tStringToTime.parseDateTime(\"+52 weeks\", now).toDate());\n\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"1 year\", now).toDate(),\n\t\t\t\tStringToTime.parseDateTime(\"+1 year\", now).toDate());\n\n\t\tAssert.assertEquals(\n\t\t\t\tStringToTime.parseDateTime(\"+1 year\", now).toDate(),\n\t\t\t\tStringToTime.parseDateTime(\"+12 months\", now).toDate());\n\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"+1 year 6 months\", now)\n\t\t\t\t.toDate(), StringToTime.parseDateTime(\"+18 months\", now)\n\t\t\t\t.toDate());\n\n\t\tAssert.assertEquals(\n\t\t\t\tStringToTime.parseDateTime(\"12 months 1 day 60 seconds\", now)\n\t\t\t\t\t\t.toDate(),\n\t\t\t\tStringToTime.parseDateTime(\"1 year 24 hours 1 minute\", now)\n\t\t\t\t\t\t.toDate());\n\t}\n\n\t@Test\n\tpublic void testDecrements() {\n\t\tDate now = new Date();\n\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.setTime(now);\n\t\tcal.set(Calendar.HOUR_OF_DAY, 
cal.get(Calendar.HOUR_OF_DAY) - 1);\n\t\tAssert.assertEquals(new Date(cal.getTimeInMillis()), StringToTime\n\t\t\t\t.parseDateTime(\"-1 hour\", now).toDate());\n\t}\n\n\t@Test\n\tpublic void testTomorrow() {\n\t\tDate now = new Date();\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.setTime(now);\n\t\tcal.set(Calendar.DATE, cal.get(Calendar.DATE) + 1);\n\t\tAssert.assertEquals(new Date(cal.getTimeInMillis()), StringToTime\n\t\t\t\t.parseDateTime(\"tomorrow\", now).toDate());\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"now +24 hours\", now)\n\t\t\t\t.toDate(), StringToTime.parseDateTime(\"tomorrow\", now).toDate());\n\t}\n\n\t@Test\n\tpublic void testTomorrowMorning() {\n\t\tDate now = new Date();\n\t\tAssert.assertEquals(\n\t\t\t\tStringToTime.parseDateTime(\"this morning +24 hours\", now)\n\t\t\t\t\t\t.toDate(),\n\t\t\t\tStringToTime.parseDateTime(\"tomorrow morning\", now).toDate());\n\t}\n\n\t@Test\n\tpublic void testTomorrowNoon() {\n\t\tDate now = new Date();\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"noon +24 hours\", now)\n\t\t\t\t.toDate(), StringToTime.parseDateTime(\"tomorrow noon\", now)\n\t\t\t\t.toDate());\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"noon +24 hours\", now)\n\t\t\t\t.toDate(), StringToTime.parseDateTime(\"noon tomorrow\", now)\n\t\t\t\t.toDate());\n\t}\n\n\t@Test\n\tpublic void testTomorrowAfternoon() {\n\t\tDate now = new Date();\n\t\tAssert.assertEquals(\n\t\t\t\tStringToTime.parseDateTime(\"this afternoon +24 hours\", now)\n\t\t\t\t\t\t.toDate(),\n\t\t\t\tStringToTime.parseDateTime(\"tomorrow afternoon\", now).toDate());\n\t}\n\n\t@Test\n\tpublic void testTomorrowEvening() {\n\t\tDate now = new Date();\n\t\tAssert.assertEquals(\n\t\t\t\tStringToTime.parseDateTime(\"this evening +24 hours\", now)\n\t\t\t\t\t\t.toDate(),\n\t\t\t\tStringToTime.parseDateTime(\"tomorrow evening\", now).toDate());\n\t}\n\n\t@Test\n\tpublic void testTomorrowNight() {\n\t\tDate now = new 
Date();\n\t\tAssert.assertEquals(StringToTime\n\t\t\t\t.parseDateTime(\"tonight +24 hours\", now).toDate(), StringToTime\n\t\t\t\t.parseDateTime(\"tomorrow night\", now).toDate());\n\t}\n\n\t// e.g., October 26, 1981, or Oct 26, 1981, or 26 October 1981, or 26 Oct\n\t// 1981, or 26 Oct 81\n\t@Test\n\tpublic void testLongHand() throws Exception {\n\t\tAssert.assertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"),\n\t\t\t\tStringToTime.parseDateTime(\"October 26, 1981\"));\n\t\tAssert.assertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"),\n\t\t\t\tStringToTime.parseDateTime(\"Oct 26, 1981\"));\n\n\t\tAssert.assertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"),\n\t\t\t\tStringToTime.parseDateTime(\"26 October 1981\"));\n\t\tAssert.assertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"),\n\t\t\t\tStringToTime.parseDateTime(\"26 Oct 1981\"));\n\t\tAssert.assertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"),\n\t\t\t\tStringToTime.parseDateTime(\"26 Oct 81\"));\n\n\t\tAssert.assertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"),\n\t\t\t\tStringToTime.parseDateTime(\"26 october 1981\"));\n\t\tAssert.assertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"),\n\t\t\t\tStringToTime.parseDateTime(\"26 oct 1981\"));\n\t\tAssert.assertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"),\n\t\t\t\tStringToTime.parseDateTime(\"26 oct 81\"));\n\n\t\tAssert.assertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"1/1/2000\"),\n\t\t\t\tStringToTime.parseDateTime(\"1 Jan 2000\"));\n\t\tAssert.assertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"1/1/2000\"),\n\t\t\t\tStringToTime.parseDateTime(\"1 Jan 00\"));\n\n\t\tAssert.assertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"1/1/2000\"),\n\t\t\t\tStringToTime.parseDateTime(\"1 jan 2000\"));\n\t\tAssert.assertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"1/1/2000\"),\n\t\t\t\tStringToTime.parseDateTime(\"1 jan 00\"));\n\t}\n\n\t// e.g., 10/26/1981 
or 10/26/81\n\t@Test\n\tpublic void testWithSlahesMonthFirst() throws Exception {\n\t\tAssert.assertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"),\n\t\t\t\tStringToTime.parseDateTime(\"10/26/1981\"));\n\t\tAssert.assertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"),\n\t\t\t\tStringToTime.parseDateTime(\"10/26/81\"));\n\t}\n\n\t// e.g., 1981/10/26\n\t@Test\n\tpublic void testWithSlashesYearFirst() throws Exception {\n\t\tAssert.assertEquals(new SimpleDateFormat(\"M/d/y\").parse(\"10/26/1981\"),\n\t\t\t\tStringToTime.parseDateTime(\"1981/10/26\"));\n\t}\n\n\t// e.g., October 26 and Oct 26\n\t@Test\n\tpublic void testMonthAndDate() throws Exception {\n\t\tDate now = new Date();\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.set(Calendar.MONTH, Calendar.OCTOBER);\n\t\tcal.set(Calendar.DATE, 26);\n\t\tAssert.assertEquals(new Date(cal.getTimeInMillis()), StringToTime\n\t\t\t\t.parseDateTime(\"October 26\", now).toDate());\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"Oct 26\", now).toDate(),\n\t\t\t\tStringToTime.parseDateTime(\"October 26\", now).toDate());\n\t}\n\n\t// e.g., 10/26\n\t@Test\n\tpublic void testWithSlahesMonthAndDate() throws Exception {\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.set(Calendar.MONTH, Calendar.OCTOBER);\n\t\tcal.set(Calendar.DATE, 26);\n\t\tAssert.assertEquals(new Date(cal.getTimeInMillis()), StringToTime\n\t\t\t\t.parseDateTime(\"10/26\").toDate());\n\t}\n\n\t// e.g., October or Oct\n\t@Test\n\tpublic void testMonth() throws Exception {\n\t\tDate now = new Date();\n\n\t\tAssert.assertEquals(\n\t\t\t\tStringToTime.parseDateTime(\"October\", now).toDate(),\n\t\t\t\tStringToTime.parseDateTime(\"Oct\", now));\n\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.setTime(now);\n\n\t\tCalendar cal2 = Calendar.getInstance();\n\n\t\t// it should be this year\n\t\tcal2.setTime(StringToTime.parseDateTime(\"January\", now).toDate());\n\t\tAssert.assertEquals(cal.get(Calendar.YEAR), 
cal2.get(Calendar.YEAR));\n\t\tcal2.setTime(StringToTime.parseDateTime(\"December\", now).toDate());\n\t\tAssert.assertEquals(cal.get(Calendar.YEAR), cal2.get(Calendar.YEAR));\n\t}\n\n\t@Test\n\tpublic void testDayOfWeek() throws Exception {\n\t\tDate now = new Date();\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"Friday\", now).toDate(),\n\t\t\t\tStringToTime.parseDateTime(\"Fri\", now).toDate());\n\n\t\tCalendar cal = Calendar.getInstance();\n\t\tcal.setTime(now);\n\n\t\t// if today's day of the week is greater than or equal to our test day\n\t\t// of the week (Wednesday)\n\t\tCalendar cal2 = Calendar.getInstance();\n\t\tif(cal.get(Calendar.DAY_OF_WEEK) >= 3) {// then the day of the week on\n\t\t\t\t\t\t\t\t\t\t\t\t// the date returned should be\n\t\t\t\t\t\t\t\t\t\t\t\t// next week\n\t\t\tcal2.setTime(StringToTime.parseDateTime(\"Wednesday\", now).toDate());\n\t\t\tAssert.assertEquals(cal.get(Calendar.WEEK_OF_YEAR) + 1,\n\t\t\t\t\tcal2.get(Calendar.WEEK_OF_YEAR));\n\t\t}\n\t\telse {\n\t\t\t// otherwise, it should be this year\n\t\t\tcal2.setTime(StringToTime.parseDateTime(\"Wednesday\", now).toDate());\n\t\t\tAssert.assertEquals(cal.get(Calendar.WEEK_OF_YEAR),\n\t\t\t\t\tcal2.get(Calendar.WEEK_OF_YEAR));\n\t\t}\n\t}\n\n\t@Test\n\tpublic void testNext() {\n\t\tDate now = new Date();\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"next January 15\", now)\n\t\t\t\t.toDate(), StringToTime.parseDateTime(\"Jan 15\", now).toDate());\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"next Dec\", now)\n\t\t\t\t.toDate(), StringToTime.parseDateTime(\"December\", now).toDate());\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"next Sunday\", now)\n\t\t\t\t.toDate(), StringToTime.parseDateTime(\"Sun\", now).toDate());\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"next Sat\", now)\n\t\t\t\t.toDate(), StringToTime.parseDateTime(\"Saturday\", now).toDate());\n\t}\n\n\t@Test\n\tpublic void testLast() {\n\t\tDate now = new 
Date();\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"last January 15\", now)\n\t\t\t\t.toDate(), StringToTime.parseDateTime(\"Jan 15 -1 year\", now)\n\t\t\t\t.toDate());\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"last Dec\", now)\n\t\t\t\t.toDate(), StringToTime.parseDateTime(\"December -1 year\", now)\n\t\t\t\t.toDate());\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"last Sunday\", now)\n\t\t\t\t.toDate(), StringToTime.parseDateTime(\"Sun -1 week\", now)\n\t\t\t\t.toDate());\n\t\tAssert.assertEquals(StringToTime.parseDateTime(\"last Sat\", now)\n\t\t\t\t.toDate(), StringToTime.parseDateTime(\"Saturday -1 week\", now)\n\t\t\t\t.toDate());\n\t}\n\n}\n"}}
-{"repo": "nbarbey/TomograPy", "pr_number": 1, "title": "Converts to Rust and adds modern features", "state": "closed", "merged_at": "2023-09-28T19:47:41Z", "additions": 2784, "deletions": 4852, "files_changed": ["exemples/test_bpj.py", "exemples/test_siddon_cgs_secchi.py", "exemples/test_siddon_cor1.py", "exemples/test_siddon_lo.py", "exemples/test_siddon_secchi.py", "exemples/test_siddon_secchi_dt.py", "exemples/test_siddon_secchi_mask.py", "exemples/test_siddon_simu.py", "exemples/test_siddon_simu_dt.py", "exemples/test_siddon_simu_dt_lo.py", "exemples/test_siddon_simu_lo.py", "exemples/test_siddon_simu_sun.py", "exemples/test_thomson_simu.py", "experiments.py", "experiments_dottest.py", "experiments_filtered_bpj.py", "experiments_multi_view.py", "experiments_only_bpj.py", "experiments_single_view.py", "experiments_solar.py", "python/tomograpy/__init__.py", "python/tomograpy/coordinates.py", "python/tomograpy/third.py"], "files_before": {"exemples/test_bpj.py": "#!/usr/bin/env python\n\n\"\"\"\nSmall projection test to compare with IDL tomograpy.\n\"\"\"\n\nimport numpy as np\nimport tomograpy\nim = tomograpy.centered_stack(0.0016, 32, n_images=1, radius=200., fill=0.)\ncube = tomograpy.centered_cubic_map(3, 256, fill=1.)\nP = tomograpy.lo(im.header, cube.header, obstacle=\"sun\")\nim[:] = (P * cube.ravel()).reshape(im.shape)\n", "exemples/test_siddon_cgs_secchi.py": "#!/usr/bin/env python\nimport numpy as np\nimport os\nimport copy\nimport time\nimport tomograpy\nimport fitsarray as fa\nimport lo\nimport scipy.sparse.linalg as spl\n# data \npath = os.path.join(os.getenv('HOME'), 'data', '171dec08')\nobsrvtry = 'STEREO_A'\ntime_window = ['2008-12-01T00:00:00.000', '2008-12-03T00:00:00.000']\n# one image every time_step seconds\ntime_step = 4 * 3600.\ndata = tomograpy.secchi.read_data(path, bin_factor=4,\n obsrvtry=obsrvtry,\n time_window=time_window, \n time_step=time_step)\n# cube\nshape = 3 * (128,)\nheader = {'CRPIX1':64., 'CRPIX2':64., 'CRPIX3':64.,\n 
'CDELT1':0.0234375, 'CDELT2':0.0234375, 'CDELT3':0.0234375,\n 'CRVAL1':0., 'CRVAL2':0., 'CRVAL3':0.,}\ncube = fa.zeros(shape, header=header)\n# model\nP = tomograpy.lo(data.header, cube.header)\nD = [lo.diff(cube.shape, axis=i) for i in xrange(cube.ndim)]\nhypers = cube.ndim * (1e0, )\n# inversion\nt = time.time()\nA = P.T * P + np.sum([h * d.T * d for h, d in zip(hypers, D)])\nb = P.T * data.flatten()\n#callback = lo.iterative.CallbackFactory(verbose=True)\n#x, info = spl.bicgstab(A, b, maxiter=100, callback=callback)\nx, info = lo.acg(P, data.flatten(), D, hypers, maxiter=100,)\nsol = cube.copy()\nsol[:] = x.reshape(cube.shape)\nprint(time.time() - t)\n", "exemples/test_siddon_cor1.py": "#!/usr/bin/env python\nimport numpy as np\nimport os\nimport copy\nimport time\nimport tomograpy\nimport fitsarray as fa\nimport lo\n\n# data \npath = os.path.join(os.getenv('HOME'), 'data', 'tomograpy., 'cor1')\n#obsrvtry = 'SOHO '\n#instrume = 'LASCO '\ntime_window = ['2009/09/01 00:00:00.000', '2009/09/15 00:00:00.000']\ntime_step = 8 * 3600. # one image every time_step seconds\ndata = tomograpy.solar.read_data(path, bin_factor=8,\n #time_window=time_window, \n #time_step=time_step\n )\n# errors in data ...\ndata.header['BITPIX'][:] = -64\ndata[np.isnan(data)] = 0.\ndata.header['RSUN'] /= 16.\n# cube\nshape = np.asarray(3 * (128. ,))\ncrpix = shape / 2.\ncdelt = 6. 
/ shape\ncrval = np.zeros(3)\nheader = {'CRPIX1':crpix[0], 'CRPIX2':crpix[1], 'CRPIX3':crpix[2],\n 'CDELT1':cdelt[0], 'CDELT2':cdelt[1], 'CDELT3':cdelt[2],\n 'CRVAL1':0., 'CRVAL2':0., 'CRVAL3':0.,}\ncube = fa.zeros(shape, header=header)\nt = time.time()\ncube = tomograpy.backprojector(data, cube, obstacle=\"sun\")\nprint(\"backprojection time : \" + str(time.time() - t))\n\n# inversion\nt = time.time()\nu = .5\nkwargs={\n \"obj_rmin\":1.5,\n \"obj_rmax\":3.,\n \"data_rmin\":1.5,\n \"data_rmax\":2.5,\n \"mask_negative\":True\n}\nP, D, obj_mask, data_mask = tomograpy.models.thomson(data, cube, u, **kwargs)\n# bpj\nb = data.flatten()\nbpj = (P.T * b).reshape(cube.shape)\nhypers = 1e3 * np.ones(3)\nsol = lo.acg(P, b, D, hypers, maxiter=100, tol=1e-6)\nprint(\"inversion time : %f\" % (time.time() - t))\n# reshape solution\nsol.resize(cube.shape)\nsol = fa.asfitsarray(sol, header=cube.header)\n# reproject solution\nreproj = P * sol.ravel()\nreproj.resize(data.shape)\n", "exemples/test_siddon_lo.py": "#!/bin/env python\nimport numpy as np\nimport os\nimport copy\nimport time\nimport tomograpy\nimport fitsarray as fa\n# data \npath = os.path.join(os.getenv('HOME'), 'data', '171dec08')\nobsrvtry = 'STEREO_A'\ntime_window = ['2008-12-01T00:00:00.000', '2008-12-03T00:00:00.000']\ntime_step = 4 * 3600. 
# one image every time_step seconds\ndata = tomograpy.secchi.read_data(path, bin_factor=4,\n obsrvtry=obsrvtry,\n time_window=time_window, \n time_step=time_step)\n# cube\nshape = 3 * (128,)\nheader = dict()\nfor i in xrange(1, 4):\n header['CRPIX' + str(i)] = 64.\n header['CDELT' + str(i)] = 0.0234375\n header['CRVAL' + str(i)] = 0.\ncube = fa.zeros(shape, header=header, dtype=np.float32)\nP = tomograpy.sun_lo(data.header, cube.header)\nt = time.time()\nfbp = (P.T * data.flatten()).reshape(cube.shape)\nprint(\"backprojection time : \" + str(time.time() - t))\n\nt = time.time()\nfbp0 = tomograpy.backprojector_sun(data, cube)\nprint(\"backprojection time : \" + str(time.time() - t))\n\n#assert np.all(fbp == fbp0)\n", "exemples/test_siddon_secchi.py": "#!/usr/bin/env python\nimport numpy as np\nimport os\nimport copy\nimport time\nimport tomograpy\nimport fitsarray as fa\n# data \npath = os.path.join(os.getenv('HOME'), 'data', 'tomograpy., '171dec08')\nobsrvtry = 'STEREO_A'\ntime_window = ['2008-12-01T00:00:00.000', '2008-12-15T00:00:00.000']\ntime_step = 8 * 3600. 
# one image every time_step seconds\ndata = tomograpy.solar.read_data(path, bin_factor=8,\n obsrvtry=obsrvtry,\n time_window=time_window, \n time_step=time_step)\n# map\ncube = tomograpy.centered_cubic_map(3, 128, fill=0.)\nt = time.time()\ncube = tomograpy.backprojector(data, cube, obstacle=\"sun\")\nprint(\"backprojection time : \" + str(time.time() - t))\n", "exemples/test_siddon_secchi_dt.py": "#!/usr/bin/env python\nimport os\nimport time\nimport numpy as np\nimport lo\nimport tomograpy\nfrom tomograpy.solar import read_data\n# data\nobsrvtry = ('STEREO_A', 'STEREO_B')\ndata = tomograpy.solar.concatenate(\n [read_data(os.path.join(os.getenv('HOME'), 'data', 'tomograpy., '171dec08'), \n bin_factor=4,\n obsrvtry=obs,\n time_window=['2008-12-01T00:00:00.000', \n '2008-12-15T00:00:00.000'],\n time_step= 4 * 3600.\n )\n for obs in obsrvtry])\ndata = tomograpy.solar.sort_data_array(data)\n# scale A and B images\n# the ratio of sensitivity between EUVI A and B\ncalibration_ba = {171:0.902023, 195:0.974536, 284:0.958269, 304:1.05954}\nfor i in xrange(data.shape[-1]):\n if data.header['OBSRVTRY'][i] == 'STEREO_B':\n data[..., i] /= calibration_ba[data.header['WAVELNTH'][i]]\n\n# make sure it is 64 bits data\ndata.header['BITPIX'][:] = -64\n# cube\nshape = 3 * (128,)\nheader = {'CRPIX1':64.,\n 'CRPIX2':64.,\n 'CRPIX3':64.,\n 'CDELT1':0.0234375,\n 'CDELT2':0.0234375,\n 'CDELT3':0.0234375,\n 'CRVAL1':0.,\n 'CRVAL2':0.,\n 'CRVAL3':0.,}\ncube = tomograpy.fa.zeros(shape, header=header)\n# model\nkwargs = {'obj_rmin':1., 'obj_rmax':1.4, 'data_rmax':1.3,\n 'mask_negative':True, 'dt_min':100}\nP, D, obj_mask, data_mask = tomograpy.models.stsrt(data, cube, **kwargs)\n# apply mask to data\ndata *= (1 - data_mask)\n# hyperparameters\nhypers = (1e-1, 1e-1, 1e-1, 1e6)\n# test time for one projection\nt = time.time()\nu = P.T * data.ravel()\nprint(\"maximal time : %f\" % ((time.time() - t) * 100))\n# inversion\nt = time.time()\nb = data.ravel()\n#sol = lo.acg(P, b, D, hypers, 
maxiter=100)\nsol = lo.rls(P, b, D, hypers, maxiter=100)\n# reshape result\nfsol = tomograpy.fa.asfitsarray(sol.reshape(obj_mask.shape), header=header)\nprint(time.time() - t)\nfsol.tofits('stsrt_test.fits')\n", "exemples/test_siddon_secchi_mask.py": "#!/usr/bin/env python\nimport os\nimport time\nimport numpy as np\nimport lo\nimport tomograpy\nfrom tomograpy.solar import read_data\n# data\ndata = read_data(os.path.join(os.getenv('HOME'), 'data', 'tomograpy., '171dec08'),\n bin_factor=4.,\n time_window=['2008-12-01T00:00:00.000',\n '2008-12-15T00:00:00.000'],\n time_step=8 * 3600.\n )\ndata = tomograpy.solar.sort_data_array(data)\n# scale A and B images\n# the ratio of sensitivity between EUVI A and B\ncalibration_ba = {171:0.902023, 195:0.974536, 284:0.958269, 304:1.05954}\nfor i in xrange(data.shape[-1]):\n if data.header[i]['OBSRVTRY'] == 'STEREO_B':\n data[..., i] /= calibration_ba[data.header[i]['WAVELNTH']]\n\n # make sure it is 64 bits data\n data.header[i]['BITPIX'] = -64\n\n# cube\ncube = tomograpy.centered_cubic_map(3, 64, fill=0.)\n# model\nkwargs = {'obj_rmin':1., 'obj_rmax':1.5, 'data_rmin':1., 'data_rmax':1.3,\n 'mask_negative':False, 'mask_nan':True}\nP, D, obj_mask, data_mask = tomograpy.models.srt(data, cube, **kwargs)\n# apply mask to data\ndata *= (1 - data_mask)\ndata[np.isnan(data)] = 0.\n# hyperparameters\nhypers = cube.ndim * (1e0, )\n# inversion\n# expected time\nb = data.ravel()\nb[np.isnan(b)] = 0.\nt = time.time()\nbpj = P.T * b\nprint((time.time() - t) * 4 * 100 )\n# real time\nt = time.time()\nsol = lo.acg(P, b, D, hypers, maxiter=100)\nprint(time.time() - t)\n# reshape result\nfsol = tomograpy.fa.asfitsarray(sol.reshape(cube.shape), header=cube.header)\ntomograpy.path = os.path.join(os.getenv('HOME'), 'data', 'tomograpy.)\nfsol.tofits(os.path.join(tomograpy.path, \"output\", \"test_tomograpy.secchi_mask.fits\"))\n", "exemples/test_siddon_simu.py": "#!/usr/bin/env python\nimport time\nimport numpy as np\nimport tomograpy\n# 
object\nobject_header = tomograpy.centered_cubic_map_header(3, 128)\nobj = tomograpy.simu.object_from_header(object_header, fill=1.)\n# data\nradius = 200.\na = tomograpy.fov(object_header, radius)\ndata = tomograpy.centered_stack(a, 128, n_images=60, radius=200., max_lon=np.pi)\n# projection\nt = time.time()\ndata = tomograpy.projector(data, obj)\nprint(\"projection time : \" + str(time.time() - t))\n# backprojection\nt = time.time()\ndata[:] = 1.\nobj0 = tomograpy.simu.object_from_header(object_header, fill=0.)\nobj0 = tomograpy.backprojector(data, obj0)\nprint(\"backprojection time : \" + str(time.time() - t))\n\nobj1 = tomograpy.simu.object_from_header(object_header, fill=0.)\nobj1 = tomograpy.backprojector(data, obj1)\n", "exemples/test_siddon_simu_dt.py": "#!/usr/bin/env python\nimport time\nimport numpy as np\nimport tomograpy\n# object\nobj = tomograpy.centered_cubic_map(3, 128, fill=1.)\n# number of images\nn = 20\n# reshape object for 4d model\nobj4 = obj.reshape(obj.shape + (1,)).repeat(n, axis=-1)\nobj4.header['NAXIS'] = 4\nobj4.header['NAXIS4'] = obj4.shape[3]\nobj4.header['CRVAL4'] = 0.\n\n# data \nradius = 200\na = tomograpy.fov(obj.header, radius)\ndata = tomograpy.centered_stack(a, 128, n_images=n, radius=radius,\n max_lon=np.pi)\ndata[:] = np.zeros(data.shape)\n# projection\nt = time.time()\ndata = tomograpy.projector4d(data, obj4)\nprint(\"projection time : \" + str(time.time() - t))\n# backprojection\nx0 = obj4.copy()\nx0[:] = 0.\nt = time.time()\nx0 = tomograpy.backprojector4d(data, x0)\nprint(\"backprojection time : \" + str(time.time() - t))\n", "exemples/test_siddon_simu_dt_lo.py": "#!/usr/bin/env python\nimport time\nimport numpy as np\nimport tomograpy\nimport lo\n# object\nobj = tomograpy.centered_cubic_map(3, 64, fill=1.)\n# number of images\nn = 64\n# reshape object for 4d model\nobj4 = obj[..., np.newaxis].repeat(n, axis=-1)\nobj4.header['NAXIS'] = 4\nobj4.header['NAXIS4'] = obj4.shape[3]\nobj4.header['CRVAL4'] = 0.\n\n# data \nradius 
= 200\na = tomograpy.fov(obj.header, radius)\ndata1 = tomograpy.centered_stack(a, 64, n_images=n/2, radius=radius,\n max_lon=np.pi, fill=0.)\ndata2 = tomograpy.centered_stack(a, 64, n_images=n/2, radius=radius,\n min_lon=np.pi / 2., max_lon=1.5 * np.pi, fill=0.)\ndata = tomograpy.solar.concatenate((data1, data2))\n\n# times\nDT = 1000.\ndt_min = 100.\ndates = np.arange(n / 2) * DT / 2.\ndates = np.concatenate(2 * (dates, ))\ndates = [time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime((t))) for t in dates]\nfor i in xrange(len(data.header)):\n data.header[i]['DATE_OBS'] = dates[i]\n\ndata = tomograpy.solar.sort_data_array(data)\n\n# projection\nt = time.time()\ndata = tomograpy.projector4d(data, obj4, obstacle=\"sun\")\nprint(\"projection time : \" + str(time.time() - t))\n# backprojection\nx0 = obj4.copy()\nx0[:] = 0.\nt = time.time()\nx0 = tomograpy.backprojector4d(data, x0, obstacle=\"sun\")\nprint(\"backprojection time : \" + str(time.time() - t))\n\n# model\nkwargs = {'obj_rmin':1., #'obj_rmax':1.3,\n 'mask_negative':False, 'dt_min':100}\nP, D, obj_mask, data_mask = tomograpy.models.stsrt(data, obj, **kwargs)\n# hyperparameters\nhypers = (1e-1, 1e-1, 1e-1, 1e3)\n# test time for one projection\nb = data.ravel()\nt = time.time()\nu = P.T * b\nprint(\"time with index grouping : %f\" % ((time.time() - t)))\n# inversion\nt = time.time()\nsol = lo.acg(P, b, D, hypers, tol=1e-10, maxiter=100)\n# reshape result\nfsol = tomograpy.fa.asfitsarray(sol.reshape(obj_mask.shape), header=obj4.header)\nprint(time.time() - t)\n#fsol.tofits('stsrt_test.fits')\n", "exemples/test_siddon_simu_lo.py": "#!/usr/bin/env python\nimport time\nimport numpy as np\nimport tomograpy\nimport lo\n# object\nobj = tomograpy.centered_cubic_map(3, 32)\nobj[:] = tomograpy.phantom.shepp_logan(obj.shape)\n# data\nradius = 200.\na = tomograpy.fov(obj.header, radius)\ndata = tomograpy.centered_stack(a, 128, n_images=60, radius=radius,\n max_lon=np.pi)\n# projector\nP = tomograpy.lo(data.header, 
obj.header)\n# projection\nt = time.time()\ndata = tomograpy.projector(data, obj)\nprint(\"projection time : \" + str(time.time() - t))\n# data\ny = data.flatten()\n# backprojection\nt = time.time()\nx0 = P.T * y\nbpj = x0.reshape(obj.shape)\nprint(\"projection time : \" + str(time.time() - t))\n# priors\nDs = [lo.diff(obj.shape, axis=i) for i in xrange(3)]\n# inversion using scipy.sparse.linalg\nt = time.time()\nsol = lo.acg(P, y, Ds, 1e-2 * np.ones(3), maxiter=100, tol=1e-20)\nsol = sol.reshape(bpj.shape)\nprint(\"inversion time : \" + str(time.time() - t))\n", "exemples/test_siddon_simu_sun.py": "#!/usr/bin/env python\nimport time\nimport numpy as np\nimport tomograpy\n# object\nobj = tomograpy.centered_cubic_map(3, 128, fill=1.)\n# data\nradius = 200.\na = tomograpy.fov(obj.header, radius)\ndata = tomograpy.centered_stack(a, 128, n_images=17, radius=200., max_lon=np.pi)\n# projection\nt = time.time()\ndata = tomograpy.projector(data, obj, obstacle=\"sun\")\nprint(\"projection time : \" + str(time.time() - t))\n# backprojection\nobj0 = tomograpy.centered_cubic_map(3, 128, fill=0.)\nt = time.time()\nobj0 = tomograpy.backprojector(data, obj0, obstacle=\"sun\")\nprint(\"backprojection time : \" + str(time.time() - t))\n", "exemples/test_thomson_simu.py": "#!/usr/bin/env python\nimport time\nimport numpy as np\nimport tomograpy\nimport lo\n# object\nobj = tomograpy.centered_cubic_map(10, 64)\nobj[:] = tomograpy.phantom.shepp_logan(obj.shape)\n# data \nradius = 200\na = tomograpy.fov(obj, radius)\ndata = tomograpy.centered_stack(a, 128, n_images=60, radius=radius, max_lon=np.pi)\n# model\nkwargs = {\"pb\":\"pb\", \"obj_rmin\":1.5, \"data_rmin\":1.5}\nP, D, obj_mask, data_mask = tomograpy.models.thomson(data, obj, u=.5, **kwargs)\n# projection\nt = time.time()\ndata[:] = (P * obj.ravel()).reshape(data.shape)\nprint(\"projection time : \" + str(time.time() - t))\n# data\n# backprojection\nt = time.time()\nx0 = P.T * data.ravel()\nbpj = 
x0.reshape(obj.shape)\nprint(\"backprojection time : \" + str(time.time() - t))\n# inversion using scipy.sparse.linalg\nt = time.time()\nsol = lo.acg(P, data.ravel(), D, 1e-3 * np.ones(3), maxiter=100, tol=1e-8)\nsol = sol.reshape(obj.shape)\nprint(\"inversion time : \" + str(time.time() - t))\n"}, "files_after": {"experiments.py": "import matplotlib.pyplot as plt\nimport numpy as np\nimport pylops\n\nfrom tomograpy import project_3d, backproject_3d\n\nif __name__ == \"__main__\":\n print(\"Test started\")\n\n img_size = 50\n cube_size = 50\n x, y = np.meshgrid(np.arange(img_size, dtype=np.float32), np.arange(img_size, dtype=np.float32))\n z = np.zeros((img_size, img_size), dtype=np.float32) + 3\n densities = np.ones((cube_size, cube_size, cube_size), dtype=np.float32)\n # densities[1, 4, 1] = 100\n densities[2, 2, 2] = 5\n densities[8, 8, 2] = 8\n densities[10:30, 10:30, 0:30] = 3\n mask = np.ones((cube_size, cube_size, cube_size), dtype=bool)\n\n #b = (0.5, 0.5, 0.5)\n b = (0, 0, 0)\n #b = (1E-8, 1E-8, 1E-8)\n #b = (1, 1, 1)\n #b = (10, 10, 10)\n delta = (1, 1, 1)\n #unit_normal = (-1.0, -1E-8, -1E-8)\n #unit_normal = (1.0, 1E-8, 1E-8)\n #unit_normal = (-1, 0, 0)\n #unit_normal = (-1/np.sqrt(3), -1/np.sqrt(3), -1/np.sqrt(3))\n\n\n unit_normal = (1E-7, 1E-7, -1.0)\n path_distance = 500\n\n result = project_3d(x, y, z, densities, mask, b, delta, unit_normal, path_distance)\n # print(result)\n\n import pylops\n\n def forward(densities):\n return project_3d(x, y, z, densities.reshape((cube_size, cube_size, cube_size)).astype(np.float32),\n mask, b, delta, unit_normal, path_distance).reshape((img_size**2))\n\n def backward(image):\n return backproject_3d(x, y, z, image.reshape((img_size, img_size)).astype(np.float32),\n np.zeros_like(densities, dtype=np.float32),\n mask, b, delta, unit_normal, path_distance, True).reshape((cube_size**3))\n\n def test_forward(xarr):\n print(xarr)\n return forward(xarr)\n\n from pylops.basicoperators import FunctionOperator\n\n op = 
pylops.FunctionOperator(forward, backward, img_size**2, cube_size**3)\n out = op @ densities.reshape((cube_size**3))\n\n fig, ax = plt.subplots()\n ax.imshow(out.reshape((img_size, img_size)))\n fig.show()\n # print(out.shape)\n # image = 2*np.ones((5, 5), dtype=np.float32)\n # image[2, 2] = 10\n\n image = out.reshape((img_size, img_size))\n\n\n import xarray as xr\n\n da = xr.DataArray(\n data=image.reshape((img_size*img_size)),\n dims = [\"x\"],\n coords = dict(\n lon=([\"x\"], x.reshape((img_size*img_size)))\n ),\n # dims=[\"x\", \"y\"],\n # coords=dict(\n # lon=([\"x\", \"y\"], x.reshape((img_size*img_size))),\n # lat=([\"x\", \"y\"], y.reshape((img_size*img_size))),\n # ),\n attrs=dict(\n description=\"Ambient temperature.\",\n units=\"degC\",\n ),\n )\n\n\n xinv = op / image.reshape((img_size*img_size))\n\n # xinv = pylops.optimization.leastsquares.regularized_inversion(\n # op, image.reshape((img_size*img_size)), [], **dict(damp=0, iter_lim=10, show=True, atol=1E-8, btol=1E-8)\n # )[0]\n # print(xinv.reshape((10, 10, 10)))\n\n fig, axs = plt.subplots(ncols=2, nrows=3, figsize=(10, 14))\n im = axs[0, 0].imshow(densities[:, :, 2], vmin=0, vmax=3)\n axs[0, 0].set_title(\"Input density slice\")\n fig.colorbar(im)\n\n im = axs[1, 0].imshow(xinv.reshape((cube_size, cube_size, cube_size))[:, :, 2], vmin=0, vmax=3)\n axs[1, 0].set_title(\"Reconstructed density slice\")\n fig.colorbar(im)\n\n im = axs[2, 0].imshow(densities[:, :, 2] - xinv.reshape((cube_size, cube_size, cube_size))[:, :, 2], vmin=-3, vmax=3, cmap='seismic')\n axs[2, 0].set_title(\"Input - reconstruction\")\n fig.colorbar(im)\n\n im = axs[1, 1].imshow(image, vmin=0, vmax=100)\n axs[1, 1].set_title(\"Image used in reconstruction\")\n fig.colorbar(im)\n\n axs[0, 1].set_axis_off()\n axs[2, 1].set_axis_off()\n plt.show()\n # BACKPROJECT\n # image = 2*np.ones((15, 15), dtype=np.float32)\n # result = backproject_3d(x, y, z, image,\n # np.zeros_like(densities, dtype=np.float32),\n # mask, b, delta, 
unit_normal, path_distance, True)\n # print(\"cube\", result)\n #\n # import matplotlib as mpl\n # import matplotlib.pyplot as plt\n #\n # mpl.use('macosx')\n #\n # fig = plt.figure()\n # ax = fig.add_subplot(projection='3d')\n # x, y, z = np.where(result > 1)\n # ax.scatter(x, y, z)\n #\n # plt.show(block=True)\n\n print(\"Test ended\")", "experiments_dottest.py": "import matplotlib.pyplot as plt\nimport numpy as np\nimport pylops\nfrom datetime import datetime\n\nfrom tomograpy import project_3d, backproject_3d\nfrom pylops.basicoperators import FunctionOperator\nfrom pylops import LinearOperator, lsqr\n\n\ndef z_rotation_matrix_3d(angle):\n return np.array([[[np.cos(angle), np.sin(angle), 0],\n [-np.sin(angle), np.cos(angle), 0],\n [0, 0, 1]]])\n\n\nif __name__ == \"__main__\":\n print(\"Test started\")\n start_time = datetime.now()\n\n angles = np.linspace(0, np.pi, 14)\n\n radius = 300\n\n img_size = 20\n cube_size = 20\n densities = np.zeros((cube_size, cube_size, cube_size), dtype=np.float32)\n densities[3:-3, 3:-3, 3:-3] = 100\n densities[7:-7, 7:-7, 7:-7] = 0\n # densities = np.random.randint(0, 100, (cube_size, cube_size, cube_size)).astype(np.float32)\n # densities[5, 5, :] = 0\n\n mask = np.ones((cube_size, cube_size, cube_size), dtype=bool)\n\n b = (0, 0, 0)\n b = (-cube_size / 2, -cube_size / 2, -cube_size / 2)\n\n delta = (1.0, 1.0, 1.0)\n path_distance = 500.0\n\n norms, xs, ys, zs, ds, imgs = [], [], [], [], [], []\n for angle in angles:\n t_angle = -angle + np.pi/2\n\n img_x = np.arange(img_size) - img_size / 2\n img_y = np.zeros((img_size, img_size))\n img_z = np.arange(img_size) - img_size / 2\n img_x, img_z = np.meshgrid(img_x, img_z)\n\n img_x, img_y, img_z = img_x.flatten(), img_y.flatten(), img_z.flatten()\n\n R = z_rotation_matrix_3d(t_angle)\n coords = (R @ np.stack([img_x, img_y, img_z]))[0]\n img_x, img_y, img_z = coords[0], coords[1], coords[2]\n img_x = radius * np.cos(angle) + img_x\n img_y = radius * np.sin(angle) + img_y\n\n xx = 
img_x.reshape((img_size, img_size)).astype(np.float32)\n yy = -img_y.reshape((img_size, img_size)).astype(np.float32)\n zz = img_z.reshape((img_size, img_size)).astype(np.float32)\n\n v1 = np.array([xx[0, 1] - xx[0, 0], yy[0, 1] - yy[0, 0], zz[0, 1] - zz[0, 0]])\n v2 = np.array([xx[1, 0] - xx[0, 0], yy[1, 0] - yy[0, 0], zz[1, 0] - zz[0, 0]])\n v1 = v1 / np.linalg.norm(v1)\n v2 = v2 / np.linalg.norm(v2)\n normal = np.cross(v1, v2)\n normal = normal / np.linalg.norm(normal)\n\n norm = normal\n norm[norm == 0] = 1E-6\n norms.append(norm)\n print(np.rad2deg(angle), np.rad2deg(t_angle), norm)\n\n d = 500\n img = project_3d(xx, yy, zz, densities, mask, b, delta, norm, d)\n xs.append(xx)\n ys.append(yy)\n zs.append(zz)\n ds.append(d)\n imgs.append(img.astype(np.float32))\n imgs = np.array(imgs)\n\n # show\n for angle, img in zip(angles, imgs):\n fig, ax = plt.subplots()\n im = ax.imshow(img)\n fig.colorbar(im)\n fig.savefig(f\"/Users/jhughes/Desktop/projection_dots/{int(np.rad2deg(angle)):03d}.png\")\n plt.close()\n\n\n class Tomo(LinearOperator):\n def __init__(self, xs, ys, zs, norms, ds, b, delta, model_shape, mask, dtype=None):\n self.xs = xs\n self.ys = ys\n self.zs = zs\n self.norms = norms\n self.ds = ds\n self.b = b\n self.delta = delta\n self.model_shape = model_shape\n self.mask = mask\n super().__init__(dtype=np.dtype(dtype),\n dims=self.model_shape,\n dimsd=(1, self.xs[0].shape[0], self.xs[0].shape[1]))\n # dimsd=(len(self.xs), self.xs[0].shape[0], self.xs[0].shape[1]))\n #\n # def _matvec(self, densities):\n # return np.array([project_3d(x, y, z, densities.reshape(self.model_shape).astype(np.float32),\n # self.mask, self.b, self.delta, norm, d)\n # for x, y, z, norm, d in zip(self.xs, self.ys, self.zs, self.norms, self.ds)]).flatten()\n #\n # def _rmatvec(self, imgs):\n # densitiesi = np.zeros(self.model_shape, dtype=np.float32)\n # for i, img in enumerate(imgs.reshape(len(self.xs), self.xs[0].shape[0], self.xs[0].shape[1])):\n # # densitiesi += 
backproject_3d(self.xs[i], self.ys[i], self.zs[i], img,\n # # densitiesi,\n # # self.mask, self.b, self.delta, self.norms[i], self.ds[i], True)\n # densitiesi = backproject_3d(self.xs[i], self.ys[i], self.zs[i], img,\n # densitiesi,\n # self.mask, self.b, self.delta, self.norms[i], self.ds[i], True)\n # # return ((densitiesi - np.sum(img)) / (len(self.xs)-1)).astype(np.float32)\n # return densitiesi / (cube_size - 1) / (len(self.xs) / 2 + 1)#/ len(self.xs) #/ densitiesi.shape[0] / len(self.xs)\n # #return densitiesi.flatten().astype(np.float32)\n\n def _matvec(self, densities):\n # return np.array([project_3d(x, y, z, densities.reshape(self.model_shape).astype(np.float32),\n # self.mask, self.b, self.delta, norm, d)\n # for x, y, z, norm, d in zip(self.xs, self.ys, self.zs, self.norms, self.ds)]).flatten()\n i = 0\n return project_3d(self.xs[i], self.ys[i], self.zs[i], densities.reshape(self.model_shape).astype(np.float32), self.mask, self.b, self.delta, self.norms[i], self.ds[i]).flatten()\n\n def _rmatvec(self, imgs):\n densitiesi = np.zeros(self.model_shape, dtype=np.float32)\n for i, img in enumerate(imgs.reshape(1, self.xs[0].shape[0], self.xs[0].shape[1])):\n # densitiesi += backproject_3d(self.xs[i], self.ys[i], self.zs[i], img,\n # densitiesi,\n # self.mask, self.b, self.delta, self.norms[i], self.ds[i], True)\n # if i == 0:\n densitiesi += backproject_3d(self.xs[i], self.ys[i], self.zs[i], img,\n np.zeros_like(densitiesi).astype(np.float32),\n self.mask, self.b, self.delta, self.norms[i], self.ds[i], True)\n # return ((densitiesi - np.sum(img)) / (len(self.xs)-1)).astype(np.float32)\n #return (densitiesi / (cube_size - 1)).flatten() #/ densitiesi.shape[0] / len(self.xs)\n return densitiesi.flatten().astype(np.float32)\n\n print(len(xs))\n op = Tomo(xs, ys, zs, norms, ds, b, delta, densities.shape, mask, dtype=np.float32)\n\n proj = op @ densities.flatten()\n densities_bpj = op.H @ proj.flatten()\n proj2 = op @ densities_bpj.flatten()\n densities_bpj2 = 
op.H @ proj2.flatten()\n print(np.where(proj == -999))\n\n #print(np.conj(proj).T @ (op @ densities))\n\n print(\"maxs\", np.max(proj), np.max(proj2))\n print(\"pcts\", np.nanpercentile(densities, 95), np.nanpercentile(densities_bpj, 95), np.nanpercentile(densities_bpj2, 95))\n print(\"close\", np.allclose(densities_bpj, densities_bpj2))\n\n fig, axs = plt.subplots(ncols=2)\n im = axs[0].imshow(proj.reshape((img_size, img_size)), vmin=0, vmax=1300)\n fig.colorbar(im)\n im = axs[1].imshow(proj2.reshape((img_size, img_size))/ (cube_size-1))#, vmin=0, vmax=1300)\n fig.colorbar(im)\n fig.savefig(\"/Users/jhughes/Desktop/projection_dots/comparison.png\")\n\n fig, axs = plt.subplots(ncols=3)\n im = axs[0].imshow(densities.reshape((cube_size, cube_size, cube_size))[0], vmin=0, vmax=100)\n fig.colorbar(im)\n im = axs[1].imshow(densities_bpj.reshape((cube_size, cube_size, cube_size))[0])# , vmin=0, vmax=100)\n fig.colorbar(im)\n im = axs[2].imshow(densities_bpj2.reshape((cube_size, cube_size, cube_size))[0])# , vmin=0, vmax=100)\n fig.colorbar(im)\n fig.savefig(\"/Users/jhughes/Desktop/projection_dots/comparison_dense.png\")\n\n\n from pylops.utils import dottest\n _ = dottest(op, 400, 8000, atol=0.1, complexflag=0, verb=True)\n\n model = pylops.optimization.basic.lsqr(op, imgs[0].flatten(), niter=10, show=True)[0]\n #\n # model = model.reshape(densities.shape)\n #\n # limit = np.nanpercentile(model, 95)\n #\n # for i in range(cube_size):\n # fig, axs = plt.subplots(ncols=2)\n # im = axs[0].imshow(densities[:, :, i], vmin=0, vmax=150)#, vmin=0, vmax=5)\n # fig.colorbar(im)\n # im = axs[1].imshow(model[:, :, i], vmin=0, vmax=limit)\n # fig.colorbar(im)\n # fig.show()\n # fig.savefig(f\"/Users/jhughes/Desktop/projection_dots/test_{i:03d}.png\")\n", "experiments_filtered_bpj.py": "import matplotlib.pyplot as plt\nimport numpy as np\nimport pylops\nimport scipy.fftpack as fp\nfrom tomograpy import project_3d, backproject_3d\n\n\ndef z_rotation_matrix_3d(angle):\n return 
np.array([[[np.cos(angle), np.sin(angle), 0],\n [-np.sin(angle), np.cos(angle), 0],\n [0, 0, 1]]])\n\n\nangles = np.linspace(0, 2*np.pi, 50)\n\nradius = 500\n\nimg_size = 100\ncube_size = 100\n# x, y = np.meshgrid(np.arange(img_size, dtype=np.float32), np.arange(img_size, dtype=np.float32))\n# z = np.zeros((img_size, img_size), dtype=np.float32)\ndensities = np.zeros((cube_size, cube_size, cube_size), dtype=np.float32)\n# densities[1:30, 1:30, 1:30] = 25\n# densities[80:90, 80:90, 80:90] = 5\ndensities[10:-10, 10:-10, 10:-10] = 100\ndensities[30:-30, 30:-30, 30:-30] = 0\n\n\n# densities[2, 2, 2] = 5\n# densities[8, 8, 2] = 8\n# densities[10:30, 10:30, 0:30] = 3\nmask = np.ones((cube_size, cube_size, cube_size), dtype=bool)\n\nb = (0, 0, 0)\nb = (-cube_size / 2, -cube_size / 2, -cube_size / 2)\n\ndelta = (1.0, 1.0, 1.0)\n# unit_normal = (1E-7, 1E-7, 1.0)\npath_distance = 500.0\n\ntotal = np.zeros_like(densities)\n\nfor angle in angles:\n t_angle = -angle + np.pi / 2 # np.deg2rad(np.abs(90*np.cos(angle)))\n\n img_x = np.arange(img_size) - img_size / 2\n img_y = np.zeros((img_size, img_size))\n img_z = np.arange(img_size) - img_size / 2\n img_x, img_z = np.meshgrid(img_x, img_z)\n\n img_x, img_y, img_z = img_x.flatten(), img_y.flatten(), img_z.flatten()\n\n R = z_rotation_matrix_3d(t_angle)\n coords = (R @ np.stack([img_x, img_y, img_z]))[0]\n img_x, img_y, img_z = coords[0], coords[1], coords[2]\n img_x = radius * np.cos(angle) + img_x # - img_size/2\n img_y = radius * np.sin(angle) + img_y # - img_size/2\n\n xx = img_x.reshape((img_size, img_size)).astype(np.float32)\n yy = -img_y.reshape((img_size, img_size)).astype(np.float32)\n zz = img_z.reshape((img_size, img_size)).astype(np.float32)\n\n v1 = np.array([xx[0, 1] - xx[0, 0], yy[0, 1] - yy[0, 0], zz[0, 1] - zz[0, 0]])\n v2 = np.array([xx[1, 0] - xx[0, 0], yy[1, 0] - yy[0, 0], zz[1, 0] - zz[0, 0]])\n v1 = v1 / np.linalg.norm(v1)\n v2 = v2 / np.linalg.norm(v2)\n normal = np.cross(v1, v2)\n normal = normal / 
np.linalg.norm(normal)\n\n norm = normal # R @ np.array([1E-7, 1E-7, -1.0])\n norm[norm == 0] = 1E-6\n print(np.rad2deg(angle), np.rad2deg(t_angle), norm)\n\n proj = project_3d(xx, yy, zz, densities, mask, b, delta, norm, path_distance)\n\n fig, ax = plt.subplots()\n im = ax.imshow(proj, origin='lower')\n fig.colorbar(im)\n fig.savefig(f\"/Users/jhughes/Desktop/projection_bpj/proj_{angle:0.3f}.png\")\n plt.close()\n\n # FILTERING\n # (w, h) = proj.shape\n # half_w, half_h = int(w / 2), int(h / 2)\n #\n # F1 = fp.fft2(proj.astype(float))\n # F2 = fp.fftshift(F1)\n #\n # # high pass filter\n # n = 10\n # F2[half_w - n:half_w + n + 1, half_h - n:half_h + n + 1] = 0\n # proj = fp.ifft2(fp.ifftshift(F2)).real\n\n result = backproject_3d(xx, yy, zz, proj.astype(np.float32),\n np.zeros_like(densities, dtype=np.float32),\n mask, b, delta, norm, path_distance, True)\n print(result.shape)\n\n scaling = densities.shape[0]\n result = result / scaling\n\n total += result\n# ((result - np.sum(proj)) / (len(self.xs)-1)).astype(np.float32)\n#result = result - np.sum(proj)\n\n# # Scale result to make sure that fbp(A, A(x)) == x holds at least\n# # to some approximation. In limited experiments, this is true for\n# # this version of FBP up to 1%.\n# # *Note*: For some reason, we do not have to scale with respect to\n# # the pixel dimension that is orthogonal to the rotation axis (`u`\n# # or horizontal pixel dimension). 
Hence, we only scale with the\n# # other pixel dimension (`v` or vertical pixel dimension).\n# vg, pg = A.astra_compat_vg, A.astra_compat_pg\n#\n# pixel_height = (pg.det_size[0] / pg.det_shape[0])\n# voxel_volume = np.prod(np.array(vg.size / np.array(vg.shape)))\n# scaling = (np.pi / pg.num_angles) * pixel_height / voxel_volume\n#\n# rec *= scaling\n# pixel_height = proj.size / proj.shape[0]\n# voxel_volume = np.prod(densities.size / np.array(densities.shape))\n# scaling = (np.pi / 1) * pixel_height / voxel_volume\n\ntotal /= len(angles)\n\nfor i in range(cube_size):\n fig, axs = plt.subplots(ncols=2)\n im = axs[0].imshow(densities[i, :, :], origin='lower', vmin=0, vmax=150)\n fig.colorbar(im)\n\n im = axs[1].imshow(total[i, :, :], origin='lower')#, vmin=0, vmax=150)\n fig.colorbar(im)\n fig.savefig(f\"/Users/jhughes/Desktop/projection_bpj/{i:03d}.png\")\n plt.close()\n", "experiments_multi_view.py": "import matplotlib.pyplot as plt\nimport numpy as np\nimport pylops\nfrom datetime import datetime\n\nfrom tomograpy import project_3d, backproject_3d\nfrom pylops.basicoperators import FunctionOperator\nfrom pylops import LinearOperator, lsqr\n\n\ndef z_rotation_matrix_3d(angle):\n return np.array([[[np.cos(angle), np.sin(angle), 0],\n [-np.sin(angle), np.cos(angle), 0],\n [0, 0, 1]]])\n\n\nif __name__ == \"__main__\":\n print(\"Test started\")\n start_time = datetime.now()\n\n angles = np.linspace(0, np.pi, 14) + np.pi/60\n\n radius = 300\n\n img_size = 100\n cube_size = 100\n # x, y = np.meshgrid(np.arange(img_size, dtype=np.float32), np.arange(img_size, dtype=np.float32))\n # z = np.zeros((img_size, img_size), dtype=np.float32)\n densities = np.zeros((cube_size, cube_size, cube_size), dtype=np.float32)\n # densities[1:30, 1:30, 1:30] = 25\n # densities[80:90, 80:90, 80:90] = 5\n densities[30:-30, 30:-30, 30:-30] = 100\n densities[40:-40, 40:-40, 40:-40] = 0\n\n # densities[2, 2, 2] = 5\n # densities[8, 8, 2] = 8\n # densities[10:30, 10:30, 0:30] = 3\n mask = 
np.ones((cube_size, cube_size, cube_size), dtype=bool)\n\n b = (0, 0, 0)\n b = (-cube_size / 2, -cube_size / 2, -cube_size / 2)\n\n delta = (1.0, 1.0, 1.0)\n # delta = (0.5, 0.5, 0.5)\n # unit_normal = (1E-7, 1E-7, 1.0)\n path_distance = 500.0\n\n norms, xs, ys, zs, ds, imgs = [], [], [], [], [], []\n #angles = np.array([0, 30])\n for angle in angles:\n t_angle = -angle + np.pi/2 #np.deg2rad(np.abs(90*np.cos(angle)))\n\n img_x = np.arange(img_size) - img_size / 2\n img_y = np.zeros((img_size, img_size))\n img_z = np.arange(img_size) - img_size / 2\n img_x, img_z = np.meshgrid(img_x, img_z)\n\n img_x, img_y, img_z = img_x.flatten(), img_y.flatten(), img_z.flatten()\n\n R = z_rotation_matrix_3d(t_angle)\n coords = (R @ np.stack([img_x, img_y, img_z]))[0]\n img_x, img_y, img_z = coords[0], coords[1], coords[2]\n img_x = radius * np.cos(angle) + img_x # - img_size/2\n img_y = radius * np.sin(angle) + img_y # - img_size/2\n\n xx = img_x.reshape((img_size, img_size)).astype(np.float32)\n yy = -img_y.reshape((img_size, img_size)).astype(np.float32)\n zz = img_z.reshape((img_size, img_size)).astype(np.float32)\n\n v1 = np.array([xx[0, 1] - xx[0, 0], yy[0, 1] - yy[0, 0], zz[0, 1] - zz[0, 0]])\n v2 = np.array([xx[1, 0] - xx[0, 0], yy[1, 0] - yy[0, 0], zz[1, 0] - zz[0, 0]])\n v1 = v1 / np.linalg.norm(v1)\n v2 = v2 / np.linalg.norm(v2)\n normal = np.cross(v1, v2)\n normal = normal / np.linalg.norm(normal)\n\n norm = normal # R @ np.array([1E-7, 1E-7, -1.0])\n norm[norm == 0] = 1E-6\n norms.append(norm)\n print(np.rad2deg(angle), np.rad2deg(t_angle), norm)\n\n d = 500\n img = project_3d(xx, yy, zz, densities, mask, b, delta, norm, d)\n xs.append(xx)\n ys.append(yy)\n zs.append(zz)\n ds.append(d)\n imgs.append(img.astype(np.float32))\n imgs = np.array(imgs)\n\n # show\n for angle, img in zip(angles, imgs):\n fig, ax = plt.subplots()\n im = ax.imshow(img)\n fig.colorbar(im)\n fig.savefig(f\"/Users/jhughes/Desktop/projection_test/{int(np.rad2deg(angle)):03d}.png\")\n plt.close()\n 
#\n # model = np.zeros(densities.shape, dtype=np.float32)\n # for i, img in enumerate(imgs.reshape(len(xs), xs[0].shape[0], xs[0].shape[1])):\n # model = backproject_3d(xs[i], ys[i], zs[i], img,\n # model,\n # mask, b, delta, norms[i], ds[i], True)\n # total = np.sum(img)\n # print(total, img.shape, imgs.shape)\n # model = ((model - total) / (len(imgs) - 1)).astype(np.float32)\n\n # # # 3d plot\n # fig = plt.figure()\n # ax = fig.add_subplot(projection='3d')\n #\n # n = 100\n #\n # for x, y, z in zip(xs, ys, zs):\n # ax.scatter(x, y, z)\n #\n # # for norm, sx, sy in zip(norms, shift_xs, shift_ys):\n # # print(\"TEST\", norm)\n # # ax.plot([0+sx, 100*norm[0]+sx], [0, 100*norm[1]], [0+sy, sy+100*norm[2]])\n #\n # ax.set_xlabel('X Label')\n # ax.set_ylabel('Y Label')\n # ax.set_zlabel('Z Label')\n #\n # plt.show()\n\n\n class Tomo(LinearOperator):\n def __init__(self, xs, ys, zs, norms, ds, b, delta, model_shape, mask, dtype=None):\n self.xs = xs\n self.ys = ys\n self.zs = zs\n self.norms = norms\n self.ds = ds\n self.b = b\n self.delta = delta\n self.model_shape = model_shape\n self.mask = mask\n super().__init__(dtype=np.dtype(dtype),\n dims=self.model_shape,\n dimsd=(len(self.xs), self.xs[0].shape[0], self.xs[0].shape[1]))\n\n def _matvec(self, densities):\n return np.array([project_3d(x, y, z, densities.reshape(self.model_shape).astype(np.float32),\n self.mask, self.b, self.delta, norm, d)\n for x, y, z, norm, d in zip(self.xs, self.ys, self.zs, self.norms, self.ds)]).flatten()\n\n def _rmatvec(self, imgs):\n densitiesi = np.zeros(self.model_shape, dtype=np.float32)\n for i, img in enumerate(imgs.reshape(len(self.xs), self.xs[0].shape[0], self.xs[0].shape[1])):\n # densitiesi += backproject_3d(self.xs[i], self.ys[i], self.zs[i], img,\n # densitiesi,\n # self.mask, self.b, self.delta, self.norms[i], self.ds[i], True)\n densitiesi += backproject_3d(self.xs[i], self.ys[i], self.zs[i], img,\n np.zeros_like(densitiesi).astype(np.float32),\n self.mask, self.b, 
self.delta, self.norms[i], self.ds[i], True)\n # return ((densitiesi - np.sum(img)) / (len(self.xs)-1)).astype(np.float32)\n return densitiesi.flatten().astype(np.float32) #/ densitiesi.shape[0] / len(self.xs)\n #return densitiesi.flatten().astype(np.float32)\n\n print(len(xs))\n op = Tomo(xs, ys, zs, norms, ds, b, delta, densities.shape, mask, dtype=np.float32)\n\n from pylops.utils import dottest\n # # print(op.dims)\n _ = dottest(op, 140000, 1000000, atol=0.1, complexflag=0, verb=True)\n\n Dop = [\n pylops.FirstDerivative(\n (cube_size, cube_size, cube_size), axis=0, edge=False, kind=\"backward\", dtype=np.float32\n ),\n pylops.FirstDerivative(\n (cube_size, cube_size, cube_size), axis=1, edge=False, kind=\"backward\", dtype=np.float32\n ),\n pylops.FirstDerivative(\n (cube_size, cube_size, cube_size), axis=2, edge=False, kind=\"backward\", dtype=np.float32\n )\n ]\n #\n # # TV\n # mu = 1.5\n # lamda = [0.1, 0.1, 0.1]\n # niter_out = 2\n # niter_in = 1\n #\n # model = pylops.optimization.sparsity.splitbregman(\n # op,\n # imgs.ravel(),\n # Dop,\n # niter_outer=niter_out,\n # niter_inner=niter_in,\n # mu=mu,\n # epsRL1s=lamda,\n # tol=1e-4,\n # tau=1.0,\n # show=True,\n # **dict(iter_lim=5, damp=1e-4)\n # )[0]\n\n # model = op / imgs.flatten()\n # model = pylops.optimization.leastsquares.regularized_inversion(\n # op, imgs.flatten(), Dop, **dict(iter_lim=10, show=True, atol=1E-8, btol=1E-8)\n # )[0]\n #\n # model = pylops.optimization.basic.lsqr(op, imgs.flatten(), x0=np.random.rand(*densities.flatten().shape).astype(np.float32),\n # niter=100, show=True, damp=0)[0]\n model = pylops.optimization.basic.lsqr(op, imgs.flatten(), niter=10, show=True)[0]#, x0 = np.random.randint(0, 30, densities.shape))[0] # x0=densities.flatten() + 50 * np.random.rand(*densities.flatten().shape).astype(np.float32) - 25)[0]\n # niter=100, show=True, damp=0)[0]\n #model = pylops.optimization.basic.lsqr(op, imgs.flatten(), x0=densities.flatten(), niter=10, show=True)[0]\n # model = 
pylops.optimization.basic.lsqr(op, imgs.flatten(), niter=1, show=True)[0]\n\n # from pylops.optimization.cls_sparsity import FISTA\n #\n # fistasolver = FISTA(op)\n #\n # model = fistasolver.solve(imgs.flatten(), niter=2, show=True)[0]\n\n #model = pylops.optimization.basic.cgls(op, imgs.flatten(), niter=10, show=True)[0]\n model = model.reshape(densities.shape)\n\n limit = 150 # np.nanpercentile(model, 95)\n\n for i in range(cube_size):\n fig, axs = plt.subplots(ncols=2)\n im = axs[0].imshow(densities[:, :, i], vmin=0, vmax=150)#, vmin=0, vmax=5)\n fig.colorbar(im)\n im = axs[1].imshow(model[:, :, i], vmin=0, vmax=limit)\n fig.colorbar(im)\n fig.show()\n fig.savefig(f\"/Users/jhughes/Desktop/projection_test/test_{i:03d}.png\")\n\n # fig, axs = plt.subplots(ncols=2)\n # axs[0].imshow(densities[:, :, 2], vmin=0, vmax=10)\n # axs[1].imshow(model[:, :, 2], vmin=0, vmax=10)\n # fig.show()\n # fig.savefig(\"/Users/jhughes/Desktop/projection_test/reconstruction_0.png\")\n #\n # fig, axs = plt.subplots(ncols=2)\n # axs[0].imshow(densities[:, :, 10], vmin=0, vmax=10)\n # axs[1].imshow(model[:, :, 10], vmin=0, vmax=10)\n # fig.show()\n # fig.savefig(\"/Users/jhughes/Desktop/projection_test/reconstruction_1.png\")\n #\n #\n # fig, axs = plt.subplots(ncols=2)\n # axs[0].imshow(densities[:, :, 30], vmin=0, vmax=10)\n # axs[1].imshow(model[:, :, 30], vmin=0, vmax=10)\n # fig.show()\n # fig.savefig(\"/Users/jhughes/Desktop/projection_test/reconstruction_2.png\")\n #\n #\n # fig, axs = plt.subplots(ncols=2)\n # axs[0].imshow(densities[:, :, 40], vmin=0, vmax=10)\n # axs[1].imshow(model[:, :, 40], vmin=0, vmax=10)\n # fig.show()\n # fig.savefig(\"/Users/jhughes/Desktop/projection_test/reconstruction_3.png\")\n\n end_time = datetime.now()\n print(end_time - start_time)\n", "experiments_only_bpj.py": "import matplotlib.pyplot as plt\nimport numpy as np\nimport pylops\n\nfrom tomograpy import project_3d, backproject_3d\n\n\ndef z_rotation_matrix_3d(angle):\n return 
np.array([[[np.cos(angle), np.sin(angle), 0],\n [-np.sin(angle), np.cos(angle), 0],\n [0, 0, 1]]])\n\n\nangles = np.linspace(0, np.pi, 14) + np.pi/60\n\nradius = 300\n\nimg_size = 100\ncube_size = 100\n# x, y = np.meshgrid(np.arange(img_size, dtype=np.float32), np.arange(img_size, dtype=np.float32))\n# z = np.zeros((img_size, img_size), dtype=np.float32)\ndensities = np.zeros((cube_size, cube_size, cube_size), dtype=np.float32)\n# densities[1:30, 1:30, 1:30] = 25\n# densities[80:90, 80:90, 80:90] = 5\ndensities[30:-30, 30:-30, 30:-30] = 100\ndensities[40:-40, 40:-40, 40:-40] = 0\n\n\n# densities[2, 2, 2] = 5\n# densities[8, 8, 2] = 8\n# densities[10:30, 10:30, 0:30] = 3\nmask = np.ones((cube_size, cube_size, cube_size), dtype=bool)\n\nb = (0, 0, 0)\nb = (-cube_size / 2, -cube_size / 2, -cube_size / 2)\n\ndelta = (1.0, 1.0, 1.0)\n# unit_normal = (1E-7, 1E-7, 1.0)\npath_distance = 500.0\n\ntotal = np.zeros_like(densities)\n\nfor angle in angles:\n t_angle = -angle + np.pi / 2 # np.deg2rad(np.abs(90*np.cos(angle)))\n\n img_x = np.arange(img_size) - img_size / 2\n img_y = np.zeros((img_size, img_size))\n img_z = np.arange(img_size) - img_size / 2\n img_x, img_z = np.meshgrid(img_x, img_z)\n\n img_x, img_y, img_z = img_x.flatten(), img_y.flatten(), img_z.flatten()\n\n R = z_rotation_matrix_3d(t_angle)\n coords = (R @ np.stack([img_x, img_y, img_z]))[0]\n img_x, img_y, img_z = coords[0], coords[1], coords[2]\n img_x = radius * np.cos(angle) + img_x # - img_size/2\n img_y = radius * np.sin(angle) + img_y # - img_size/2\n\n xx = img_x.reshape((img_size, img_size)).astype(np.float32)\n yy = -img_y.reshape((img_size, img_size)).astype(np.float32)\n zz = img_z.reshape((img_size, img_size)).astype(np.float32)\n\n v1 = np.array([xx[0, 1] - xx[0, 0], yy[0, 1] - yy[0, 0], zz[0, 1] - zz[0, 0]])\n v2 = np.array([xx[1, 0] - xx[0, 0], yy[1, 0] - yy[0, 0], zz[1, 0] - zz[0, 0]])\n v1 = v1 / np.linalg.norm(v1)\n v2 = v2 / np.linalg.norm(v2)\n normal = np.cross(v1, v2)\n normal = normal 
/ np.linalg.norm(normal)\n\n norm = normal # R @ np.array([1E-7, 1E-7, -1.0])\n norm[norm == 0] = 1E-6\n print(np.rad2deg(angle), np.rad2deg(t_angle), norm)\n\n proj = project_3d(xx, yy, zz, densities, mask, b, delta, norm, path_distance)\n\n fig, ax = plt.subplots()\n im = ax.imshow(proj, origin='lower')\n fig.colorbar(im)\n fig.savefig(f\"/Users/jhughes/Desktop/projection_bpj/proj_{angle:0.3f}.png\")\n plt.close()\n\n result = backproject_3d(xx, yy, zz, proj.astype(np.float32),\n np.zeros_like(densities, dtype=np.float32),\n mask, b, delta, norm, path_distance, True)\n print(result.shape)\n\n scaling = densities.shape[0]\n result = result / scaling\n\n total += result\n# ((result - np.sum(proj)) / (len(self.xs)-1)).astype(np.float32)\n#result = result - np.sum(proj)\n\n# # Scale result to make sure that fbp(A, A(x)) == x holds at least\n# # to some approximation. In limited experiments, this is true for\n# # this version of FBP up to 1%.\n# # *Note*: For some reason, we do not have to scale with respect to\n# # the pixel dimension that is orthogonal to the rotation axis (`u`\n# # or horizontal pixel dimension). 
Hence, we only scale with the\n# # other pixel dimension (`v` or vertical pixel dimension).\n# vg, pg = A.astra_compat_vg, A.astra_compat_pg\n#\n# pixel_height = (pg.det_size[0] / pg.det_shape[0])\n# voxel_volume = np.prod(np.array(vg.size / np.array(vg.shape)))\n# scaling = (np.pi / pg.num_angles) * pixel_height / voxel_volume\n#\n# rec *= scaling\n# pixel_height = proj.size / proj.shape[0]\n# voxel_volume = np.prod(densities.size / np.array(densities.shape))\n# scaling = (np.pi / 1) * pixel_height / voxel_volume\n\ntotal /= len(angles)\n\nfor i in range(cube_size):\n fig, axs = plt.subplots(ncols=2)\n im = axs[0].imshow(densities[i, :, :], origin='lower', vmin=0, vmax=150)\n fig.colorbar(im)\n\n im = axs[1].imshow(total[i, :, :], origin='lower', vmin=0, vmax=150)\n fig.colorbar(im)\n fig.savefig(f\"/Users/jhughes/Desktop/projection_bpj/{i:03d}.png\")\n plt.close()\n", "experiments_single_view.py": "import matplotlib.pyplot as plt\nimport numpy as np\nimport pylops\n\nfrom tomograpy import project_3d, backproject_3d\n\n\ndef z_rotation_matrix_3d(angle):\n return np.array([[[np.cos(angle), np.sin(angle), 0],\n [-np.sin(angle), np.cos(angle), 0],\n [0, 0, 1]]])\n\nangle = np.pi/2\nradius = 100\n\nimg_size = 100\ncube_size = 100\n# x, y = np.meshgrid(np.arange(img_size, dtype=np.float32), np.arange(img_size, dtype=np.float32))\n# z = np.zeros((img_size, img_size), dtype=np.float32)\ndensities = np.zeros((cube_size, cube_size, cube_size), dtype=np.float32)\ndensities[1:30, 1:30, 1:30] = 25\ndensities[80:90, 80:90, 80:90] = 5\n\n# densities[2, 2, 2] = 5\n# densities[8, 8, 2] = 8\n# densities[10:30, 10:30, 0:30] = 3\nmask = np.ones((cube_size, cube_size, cube_size), dtype=bool)\n\nb = (0, 0, 0)\nb = (-cube_size / 2, -cube_size / 2, -cube_size / 2)\n\ndelta = (1.0, 1.0, 1.0)\n#unit_normal = (1E-7, 1E-7, 1.0)\npath_distance = 500.0\n\nt_angle = -angle + np.pi / 2 # np.deg2rad(np.abs(90*np.cos(angle)))\n\nimg_x = np.arange(img_size) - img_size / 2\nimg_y = 
np.zeros((img_size, img_size))\nimg_z = np.arange(img_size) - img_size / 2\nimg_x, img_z = np.meshgrid(img_x, img_z)\n\nimg_x, img_y, img_z = img_x.flatten(), img_y.flatten(), img_z.flatten()\n\nR = z_rotation_matrix_3d(t_angle)\ncoords = (R @ np.stack([img_x, img_y, img_z]))[0]\nimg_x, img_y, img_z = coords[0], coords[1], coords[2]\nimg_x = radius * np.cos(angle) + img_x # - img_size/2\nimg_y = radius * np.sin(angle) + img_y # - img_size/2\n\nxx = img_x.reshape((img_size, img_size)).astype(np.float32)\nyy = -img_y.reshape((img_size, img_size)).astype(np.float32)\nzz = img_z.reshape((img_size, img_size)).astype(np.float32)\n\nv1 = np.array([xx[0, 1] - xx[0, 0], yy[0, 1] - yy[0, 0], zz[0, 1] - zz[0, 0]])\nv2 = np.array([xx[1, 0] - xx[0, 0], yy[1, 0] - yy[0, 0], zz[1, 0] - zz[0, 0]])\nv1 = v1 / np.linalg.norm(v1)\nv2 = v2 / np.linalg.norm(v2)\nnormal = np.cross(v1, v2)\nnormal = normal / np.linalg.norm(normal)\n\nnorm = normal # R @ np.array([1E-7, 1E-7, -1.0])\nnorm[norm == 0] = 1E-6\nprint(np.rad2deg(angle), np.rad2deg(t_angle), norm)\n\nproj = project_3d(xx, yy, zz, densities, mask, b, delta, norm, path_distance)\n\nfig, ax = plt.subplots()\nim = ax.imshow(proj, origin='lower')\nfig.colorbar(im)\nfig.savefig(\"/Users/jhughes/Desktop/projection_test2/proj.png\")\nplt.close()\n\nresult = backproject_3d(xx, yy, zz, proj.astype(np.float32),\n np.zeros_like(densities, dtype=np.float32),\n mask, b, delta, norm, path_distance, True)\nprint(result.shape)\n# ((result - np.sum(proj)) / (len(self.xs)-1)).astype(np.float32)\n#result = result - np.sum(proj)\n\n# # Scale result to make sure that fbp(A, A(x)) == x holds at least\n# # to some approximation. In limited experiments, this is true for\n# # this version of FBP up to 1%.\n# # *Note*: For some reason, we do not have to scale with respect to\n# # the pixel dimension that is orthogonal to the rotation axis (`u`\n# # or horizontal pixel dimension). 
Hence, we only scale with the\n# # other pixel dimension (`v` or vertical pixel dimension).\n# vg, pg = A.astra_compat_vg, A.astra_compat_pg\n#\n# pixel_height = (pg.det_size[0] / pg.det_shape[0])\n# voxel_volume = np.prod(np.array(vg.size / np.array(vg.shape)))\n# scaling = (np.pi / pg.num_angles) * pixel_height / voxel_volume\n#\n# rec *= scaling\n# pixel_height = proj.size / proj.shape[0]\n# voxel_volume = np.prod(densities.size / np.array(densities.shape))\n# scaling = (np.pi / 1) * pixel_height / voxel_volume\nscaling = densities.shape[0]\nresult = result / scaling\n\nfor i in range(cube_size):\n fig, axs = plt.subplots(ncols=2)\n im = axs[0].imshow(densities[i, :, :], origin='lower')\n fig.colorbar(im)\n\n im = axs[1].imshow(result[i, :, :], origin='lower')\n fig.colorbar(im)\n fig.savefig(f\"/Users/jhughes/Desktop/projection_test2/{i:03d}.png\")\n plt.close()\n", "experiments_solar.py": "import matplotlib.pyplot as plt\nimport numpy as np\nimport pylops\nfrom datetime import datetime\nfrom glob import glob\nfrom astropy.io import fits\nimport os\nfrom itertools import chain\nfrom tomograpy import project_3d, backproject_3d\nfrom pylops.basicoperators import FunctionOperator\nfrom pylops import LinearOperator, lsqr\nfrom skimage.transform import resize\n\n\ndef z_rotation_matrix_3d(angle):\n return np.array([[[np.cos(angle), np.sin(angle), 0],\n [-np.sin(angle), np.cos(angle), 0],\n [0, 0, 1]]])\n\n\nif __name__ == \"__main__\":\n print(\"Test started\")\n start_time = datetime.now()\n\n angles = list(chain(range(70, 80), range(130, 140)))\n rad_angles = np.deg2rad(angles)\n\n path = \"/Users/jhughes/Desktop/data/synthetic COMPLETE data/1 degree/\"\n filenames = glob(path + \"*.fits\")\n loaded_imgs = {np.deg2rad(angle): fits.open(os.path.join(path + f\"comp_wl_284_ang_{angle}.fits\"))[0].data for angle in angles}\n\n radius = 300\n\n img_size = 100\n cube_size = 100\n\n densities = np.zeros((cube_size, cube_size, cube_size), dtype=np.float32)\n mask = 
np.ones((cube_size, cube_size, cube_size), dtype=bool)\n\n b = (0, 0, 0)\n b = (-cube_size / 2, -cube_size / 2, -cube_size / 2)\n\n delta = (1.0, 1.0, 1.0)\n path_distance = 500.0\n\n norms, xs, ys, zs, ds, imgs = [], [], [], [], [], []\n #angles = np.array([0, 30])\n for angle in rad_angles:\n t_angle = -angle + np.pi/2 #np.deg2rad(np.abs(90*np.cos(angle)))\n\n img_x = np.arange(img_size) - img_size / 2\n img_y = np.zeros((img_size, img_size))\n img_z = np.arange(img_size) - img_size / 2\n img_x, img_z = np.meshgrid(img_x, img_z)\n\n img_x, img_y, img_z = img_x.flatten(), img_y.flatten(), img_z.flatten()\n\n R = z_rotation_matrix_3d(t_angle)\n coords = (R @ np.stack([img_x, img_y, img_z]))[0]\n img_x, img_y, img_z = coords[0], coords[1], coords[2]\n img_x = radius * np.cos(angle) + img_x # - img_size/2\n img_y = radius * np.sin(angle) + img_y # - img_size/2\n\n xx = img_x.reshape((img_size, img_size)).astype(np.float32)\n yy = -img_y.reshape((img_size, img_size)).astype(np.float32)\n zz = img_z.reshape((img_size, img_size)).astype(np.float32)\n\n v1 = np.array([xx[0, 1] - xx[0, 0], yy[0, 1] - yy[0, 0], zz[0, 1] - zz[0, 0]])\n v2 = np.array([xx[1, 0] - xx[0, 0], yy[1, 0] - yy[0, 0], zz[1, 0] - zz[0, 0]])\n v1 = v1 / np.linalg.norm(v1)\n v2 = v2 / np.linalg.norm(v2)\n normal = np.cross(v1, v2)\n normal = normal / np.linalg.norm(normal)\n\n norm = normal # R @ np.array([1E-7, 1E-7, -1.0])\n norm[norm == 0] = 1E-6\n norms.append(norm)\n print(np.rad2deg(angle), np.rad2deg(t_angle), norm)\n\n d = 500\n img = resize(loaded_imgs[angle], (img_size, img_size)) #project_3d(xx, yy, zz, densities, mask, b, delta, norm, d)\n xs.append(xx)\n ys.append(yy)\n zs.append(zz)\n ds.append(d)\n imgs.append(img.astype(np.float32))\n imgs = np.array(imgs)\n\n # show\n for angle, img in zip(rad_angles, imgs):\n fig, ax = plt.subplots()\n im = ax.imshow(img)\n fig.colorbar(im)\n fig.savefig(f\"/Users/jhughes/Desktop/projection_solar/{int(np.rad2deg(angle)):03d}.png\")\n plt.close()\n #\n 
# model = np.zeros(densities.shape, dtype=np.float32)\n # for i, img in enumerate(imgs.reshape(len(xs), xs[0].shape[0], xs[0].shape[1])):\n # model = backproject_3d(xs[i], ys[i], zs[i], img,\n # model,\n # mask, b, delta, norms[i], ds[i], True)\n # total = np.sum(img)\n # print(total, img.shape, imgs.shape)\n # model = ((model - total) / (len(imgs) - 1)).astype(np.float32)\n\n # # # 3d plot\n # fig = plt.figure()\n # ax = fig.add_subplot(projection='3d')\n #\n # n = 100\n #\n # for x, y, z in zip(xs, ys, zs):\n # ax.scatter(x, y, z)\n #\n # # for norm, sx, sy in zip(norms, shift_xs, shift_ys):\n # # print(\"TEST\", norm)\n # # ax.plot([0+sx, 100*norm[0]+sx], [0, 100*norm[1]], [0+sy, sy+100*norm[2]])\n #\n # ax.set_xlabel('X Label')\n # ax.set_ylabel('Y Label')\n # ax.set_zlabel('Z Label')\n #\n # plt.show()\n\n\n class Tomo(LinearOperator):\n def __init__(self, xs, ys, zs, norms, ds, b, delta, model_shape, mask, dtype=None):\n self.xs = xs\n self.ys = ys\n self.zs = zs\n self.norms = norms\n self.ds = ds\n self.b = b\n self.delta = delta\n self.model_shape = model_shape\n self.mask = mask\n super().__init__(dtype=np.dtype(dtype),\n dims=self.model_shape,\n dimsd=(len(self.xs), self.xs[0].shape[0], self.xs[0].shape[1]))\n\n def _matvec(self, densities):\n return np.array([project_3d(x, y, z, densities.reshape(self.model_shape).astype(np.float32),\n self.mask, self.b, self.delta, norm, d)\n for x, y, z, norm, d in zip(self.xs, self.ys, self.zs, self.norms, self.ds)]).flatten()\n\n def _rmatvec(self, imgs):\n densitiesi = np.zeros(self.model_shape, dtype=np.float32)\n for i, img in enumerate(imgs.reshape(len(self.xs), self.xs[0].shape[0], self.xs[0].shape[1])):\n # densitiesi += backproject_3d(self.xs[i], self.ys[i], self.zs[i], img,\n # densitiesi,\n # self.mask, self.b, self.delta, self.norms[i], self.ds[i], True)\n densitiesi += backproject_3d(self.xs[i], self.ys[i], self.zs[i], img,\n np.zeros_like(densitiesi).astype(np.float32),\n self.mask, self.b, self.delta, 
self.norms[i], self.ds[i], True)\n # return ((densitiesi - np.sum(img)) / (len(self.xs)-1)).astype(np.float32)\n return densitiesi.flatten().astype(np.float32) #/ densitiesi.shape[0] / len(self.xs)\n #return densitiesi.flatten().astype(np.float32)\n\n print(len(xs))\n op = Tomo(xs, ys, zs, norms, ds, b, delta, densities.shape, mask, dtype=np.float32)\n\n Dop = [\n pylops.FirstDerivative(\n (cube_size, cube_size, cube_size), axis=0, edge=False, kind=\"backward\", dtype=np.float32\n ),\n pylops.FirstDerivative(\n (cube_size, cube_size, cube_size), axis=1, edge=False, kind=\"backward\", dtype=np.float32\n ),\n pylops.FirstDerivative(\n (cube_size, cube_size, cube_size), axis=2, edge=False, kind=\"backward\", dtype=np.float32\n )\n ]\n #\n # # TV\n # mu = 1.5\n # lamda = [0.1, 0.1, 0.1]\n # niter_out = 2\n # niter_in = 1\n #\n # model = pylops.optimization.sparsity.splitbregman(\n # op,\n # imgs.ravel(),\n # Dop,\n # niter_outer=niter_out,\n # niter_inner=niter_in,\n # mu=mu,\n # epsRL1s=lamda,\n # tol=1e-4,\n # tau=1.0,\n # show=True,\n # **dict(iter_lim=5, damp=1e-4)\n # )[0]\n\n # model = op / imgs.flatten()\n # model = pylops.optimization.leastsquares.regularized_inversion(\n # op, imgs.flatten(), Dop, **dict(iter_lim=10, show=True, atol=1E-8, btol=1E-8)\n # )[0]\n #\n # model = pylops.optimization.basic.lsqr(op, imgs.flatten(), x0=np.random.rand(*densities.flatten().shape).astype(np.float32),\n # niter=100, show=True, damp=0)[0]\n model = pylops.optimization.basic.lsqr(op, imgs.flatten(), niter=3, show=True)[0]#, x0 = np.random.randint(0, 30, densities.shape))[0] # x0=densities.flatten() + 50 * np.random.rand(*densities.flatten().shape).astype(np.float32) - 25)[0]\n # niter=100, show=True, damp=0)[0]\n #model = pylops.optimization.basic.lsqr(op, imgs.flatten(), x0=densities.flatten(), niter=10, show=True)[0]\n # model = pylops.optimization.basic.lsqr(op, imgs.flatten(), niter=1, show=True)[0]\n\n # from pylops.optimization.cls_sparsity import FISTA\n #\n # 
fistasolver = FISTA(op)\n #\n # model = fistasolver.solve(imgs.flatten(), niter=2, show=True)[0]\n\n #model = pylops.optimization.basic.cgls(op, imgs.flatten(), niter=10, show=True)[0]\n model = model.reshape(densities.shape)\n\n np.save(\"/Users/jhughes/Desktop/projection_solar/cube.npy\", model)\n limit = np.nanpercentile(model, 95)\n\n for i in range(cube_size):\n fig, ax = plt.subplots()\n im = ax.imshow(model[:, :, i], vmin=0, vmax=limit)\n fig.colorbar(im)\n fig.show()\n fig.savefig(f\"/Users/jhughes/Desktop/projection_solar/test_{i:03d}.png\")\n\n reconstructions = op @ model.flatten()\n reconstructions = reconstructions.reshape((len(xs), img_size, img_size))\n\n for i, angle in enumerate(rad_angles):\n fig, axs = plt.subplots(ncols=2)\n im = axs[0].imshow(imgs[i])\n fig.colorbar(im)\n im = axs[1].imshow(reconstructions[i])\n fig.colorbar(im)\n fig.show()\n fig.savefig(f\"/Users/jhughes/Desktop/projection_solar/recon_{np.rad2deg(angle)}.png\")\n\n\n\n end_time = datetime.now()\n print(end_time - start_time)\n", "python/tomograpy/__init__.py": "from .tomograpy import *\nfrom . import third\n\n\n__doc__ = tomograpy.__doc__\nif hasattr(tomograpy, \"__all__\"):\n __all__ = tomograpy.__all__\n", "python/tomograpy/coordinates.py": "", "python/tomograpy/third.py": "import numpy as np\n\ndef print_hello(name):\n print(f\"hello {name}\")\n\n"}}
-{"repo": "commandprompt/PL-php", "pr_number": 6, "title": "fix postgresql 9.4 / php 5.5 issues", "state": "open", "merged_at": null, "additions": 5, "deletions": 2, "files_changed": ["plphp.c", "plphp_io.c", "plphp_spi.c"], "files_before": {"plphp.c": "/**********************************************************************\n * plphp.c - PHP as a procedural language for PostgreSQL\n *\n * This software is copyright (c) Command Prompt Inc.\n *\n * The author hereby grants permission to use, copy, modify,\n * distribute, and license this software and its documentation for any\n * purpose, provided that existing copyright notices are retained in\n * all copies and that this notice is included verbatim in any\n * distributions. No written agreement, license, or royalty fee is\n * required for any of the authorized uses. Modifications to this\n * software may be copyrighted by their author and need not follow the\n * licensing terms described here, provided that the new terms are\n * clearly indicated on the first page of each file where they apply.\n *\n * IN NO EVENT SHALL THE AUTHOR OR DISTRIBUTORS BE LIABLE TO ANY PARTY\n * FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES\n * ARISING OUT OF THE USE OF THIS SOFTWARE, ITS DOCUMENTATION, OR ANY\n * DERIVATIVES THEREOF, EVEN IF THE AUTHOR HAVE BEEN ADVISED OF THE\n * POSSIBILITY OF SUCH DAMAGE.\n *\n * THE AUTHOR AND DISTRIBUTORS SPECIFICALLY DISCLAIM ANY WARRANTIES,\n * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND\n * NON-INFRINGEMENT. 
THIS SOFTWARE IS PROVIDED ON AN \"AS IS\" BASIS,\n * AND THE AUTHOR AND DISTRIBUTORS HAVE NO OBLIGATION TO PROVIDE\n * MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.\n *\n * IDENTIFICATION\n *\t\t$Id$\n *********************************************************************\n */\n\n\n/* Package configuration generated by autoconf */\n#include \"config.h\"\n\n/* First round of undefs, to eliminate collision between plphp and postgresql\n * definitions\n */\n \n#undef PACKAGE_BUGREPORT\n#undef PACKAGE_NAME\n#undef PACKAGE_STRING\n#undef PACKAGE_TARNAME\n#undef PACKAGE_VERSION\n\n/* PostgreSQL stuff */\n#include \"postgres.h\"\n#include \"access/heapam.h\"\n#include \"access/transam.h\"\n\n#include \"catalog/catversion.h\"\n#include \"catalog/pg_language.h\"\n#include \"catalog/pg_proc.h\"\n#include \"catalog/pg_type.h\"\n\n#include \"commands/trigger.h\"\n#include \"fmgr.h\"\n#include \"funcapi.h\"\t\t\t/* needed for SRF support */\n#include \"lib/stringinfo.h\"\n\n#include \"utils/array.h\"\n#include \"utils/builtins.h\"\n#include \"utils/elog.h\"\n#include \"utils/lsyscache.h\"\n#include \"utils/memutils.h\"\n#include \"utils/rel.h\"\n#include \"utils/syscache.h\"\n#include \"utils/typcache.h\"\n\n/*\n * These are defined again in php.h, so undef them to avoid some\n * cpp warnings.\n */\n#undef PACKAGE_BUGREPORT\n#undef PACKAGE_NAME\n#undef PACKAGE_STRING\n#undef PACKAGE_TARNAME\n#undef PACKAGE_VERSION\n\n/* PHP stuff */\n#include \"php.h\"\n\n#include \"php_variables.h\"\n#include \"php_globals.h\"\n#include \"zend_hash.h\"\n#include \"zend_modules.h\"\n\n#include \"php_ini.h\"\t\t\t/* needed for INI_HARDCODED */\n#include \"php_main.h\"\n\n/* Our own stuff */\n#include \"plphp_io.h\"\n#include \"plphp_spi.h\"\n\n/* system stuff */\n#if HAVE_FCNTL_H\n#include \n#endif\n#if HAVE_UNISTD_H\n#include \n#endif\n\n#define INI_HARDCODED(name,value) \\\n\t\tzend_alter_ini_entry(name, sizeof(name), value, strlen(value), \\\n\t\t\t\t\t\t\t PHP_INI_SYSTEM, 
PHP_INI_STAGE_ACTIVATE);\n\n/* Check for PostgreSQL version */\n#if (CATALOG_VERSION_NO >= 200709301)\n#define PG_VERSION_83_COMPAT\n#endif\n#if (CATALOG_VERSION_NO >= 200611241)\n#define PG_VERSION_82_COMPAT\n#endif\n/* We only support 8.1 and above */\n#if (CATALOG_VERSION_NO >= 200510211)\n#define PG_VERSION_81_COMPAT\n#else\n#error \"Unsupported PostgreSQL version\"\n#endif\n\n#undef DEBUG_PLPHP_MEMORY\n\n#ifdef DEBUG_PLPHP_MEMORY\n#define REPORT_PHP_MEMUSAGE(where) \\\n\telog(NOTICE, \"PHP mem usage: %s: %u\", where, AG(allocated_memory));\n#else\n#define REPORT_PHP_MEMUSAGE(a) \n#endif\n\n/* PostgreSQL starting from v 8.2 requires this define\n * for all modules.\n */\n#ifdef PG_VERSION_82_COMPAT\nPG_MODULE_MAGIC;\n#else\n/* Supress warnings on 8.1 and below */\n#define ReleaseTupleDesc(tupdesc) \n#endif\n\n/* PHP 5.2 and earlier do not contain these definitions */\n#ifndef Z_SET_ISREF_P\n#define Z_SET_ISREF_P(foo) (foo)->is_ref = 1\n#define Z_UNSET_ISREF_P(foo) (foo)->is_ref = 0\n#endif\n\n/* 8.2 compatibility */\n#ifndef TYPTYPE_PSEUDO\n#define TYPTYPE_PSEUDO 'p'\n#define TYPTYPE_COMPOSITE 'c'\n#endif\n\n/* Check the argument type to expect to accept an initial value */\n#define IS_ARGMODE_OUT(mode) ((mode) == PROARGMODE_OUT || \\\n(mode) == PROARGMODE_TABLE)\n/*\n * Return types. Why on earth is this a bitmask? Beats me.\n * We should have separate flags instead.\n */\ntypedef enum pl_type\n{\n\tPL_TUPLE = 1 << 0,\n\tPL_ARRAY = 1 << 1,\n\tPL_PSEUDO = 1 << 2\n} pl_type;\n\n/*\n * The information we cache about loaded procedures.\n *\n * \"proname\" is the name of the function, given by the user.\n *\n * fn_xmin and fn_cmin are used to know when a function has been redefined and\n * needs to be recompiled.\n *\n * trusted indicates whether the function was created with a trusted handler.\n *\n * ret_type is a weird bitmask that indicates whether this function returns a\n * tuple, an array or a pseudotype. 
ret_oid is the Oid of the return type.\n * retset indicates whether the function was declared to return a set.\n *\n * arg_argmode indicates whether the argument is IN, OUT or both. It follows\n * values in pg_proc.proargmodes.\n *\n * n_out_args - total number of OUT or INOUT arguments.\n * arg_out_tupdesc is a tuple descriptor of the tuple constructed for OUT args.\n *\n * XXX -- maybe this thing needs to be rethought.\n */\ntypedef struct plphp_proc_desc\n{\n\tchar\t *proname;\n\tTransactionId fn_xmin;\n\tCommandId\tfn_cmin;\n\tbool\t\ttrusted;\n\tpl_type\t\tret_type;\n\tOid\t\t\tret_oid;\t\t/* Oid of returning type */\n\tbool\t\tretset;\n\tFmgrInfo\tresult_in_func;\n\tOid\t\t\tresult_typioparam;\n\tint\t\t\tn_out_args;\n\tint\t\t\tn_total_args;\n\tint\t\t\tn_mixed_args;\n\tFmgrInfo\targ_out_func[FUNC_MAX_ARGS];\n\tOid\t\t\targ_typioparam[FUNC_MAX_ARGS];\n\tchar\t\targ_typtype[FUNC_MAX_ARGS];\n\tchar\t\targ_argmode[FUNC_MAX_ARGS];\n\tTupleDesc\targs_out_tupdesc;\n} plphp_proc_desc;\n\n/*\n * Global data\n */\nstatic bool plphp_first_call = true;\nstatic zval *plphp_proc_array = NULL;\n\n/* for PHP write/flush */\nstatic StringInfo currmsg = NULL;\n\n/*\n * for PHP <-> Postgres error message passing\n *\n * XXX -- it would be much better if we could save errcontext,\n * errhint, etc as well.\n */\nstatic char *error_msg = NULL;\n/*\n * Forward declarations\n */\nstatic void plphp_init_all(void);\nvoid\t\tplphp_init(void);\n\nPG_FUNCTION_INFO_V1(plphp_call_handler);\nDatum plphp_call_handler(PG_FUNCTION_ARGS);\n\nPG_FUNCTION_INFO_V1(plphp_validator);\nDatum plphp_validator(PG_FUNCTION_ARGS);\n\nstatic Datum plphp_trigger_handler(FunctionCallInfo fcinfo,\n\t\t\t\t\t\t\t\t plphp_proc_desc *desc\n\t\t\t\t\t\t\t\t TSRMLS_DC);\nstatic Datum plphp_func_handler(FunctionCallInfo fcinfo,\n\t\t\t\t\t\t\t plphp_proc_desc *desc\n\t\t\t\t\t\t\t\tTSRMLS_DC);\nstatic Datum plphp_srf_handler(FunctionCallInfo fcinfo,\n\t\t\t\t\t\t \t plphp_proc_desc *desc\n\t\t\t\t\t\t\t 
TSRMLS_DC);\n\nstatic plphp_proc_desc *plphp_compile_function(Oid fnoid, bool is_trigger TSRMLS_DC);\nstatic zval *plphp_call_php_func(plphp_proc_desc *desc,\n\t\t\t\t\t\t\t\t FunctionCallInfo fcinfo\n\t\t\t\t\t\t\t\t TSRMLS_DC);\nstatic zval *plphp_call_php_trig(plphp_proc_desc *desc,\n\t\t\t\t\t\t\t\t FunctionCallInfo fcinfo, zval *trigdata\n\t\t\t\t\t\t\t\t TSRMLS_DC);\n\nstatic void plphp_error_cb(int type, const char *filename, const uint lineno,\n\t\t\t\t\t\t\t\t const char *fmt, va_list args);\nstatic bool is_valid_php_identifier(char *name);\n\n/*\n * FIXME -- this comment is quite misleading actually, which is not surprising\n * since it came verbatim from PL/pgSQL. Rewrite memory handling here someday\n * and remove it.\n *\n * This routine is a crock, and so is everyplace that calls it. The problem\n * is that the cached form of plphp functions/queries is allocated permanently\n * (mostly via malloc()) and never released until backend exit. Subsidiary\n * data structures such as fmgr info records therefore must live forever\n * as well. A better implementation would store all this stuff in a per-\n * function memory context that could be reclaimed at need. 
In the meantime,\n * fmgr_info_cxt must be called specifying TopMemoryContext so that whatever\n * it might allocate, and whatever the eventual function might allocate using\n * fn_mcxt, will live forever too.\n */\nstatic void\nperm_fmgr_info(Oid functionId, FmgrInfo *finfo)\n{\n\tfmgr_info_cxt(functionId, finfo, TopMemoryContext);\n}\n\n/*\n * sapi_plphp_write\n * \t\tCalled when PHP wants to write something to stdout.\n *\n * We just save the output in a StringInfo until the next Flush call.\n */\nstatic int\nsapi_plphp_write(const char *str, uint str_length TSRMLS_DC)\n{\n\tif (currmsg == NULL)\n\t\tcurrmsg = makeStringInfo();\n\n\tappendStringInfoString(currmsg, str);\n\n\treturn str_length;\n}\n\n/*\n * sapi_plphp_flush\n * \t\tCalled when PHP wants to flush stdout.\n *\n * The stupid PHP implementation calls write and follows with a Flush right\n * away -- a good implementation would write several times and flush when the\n * message is complete. To make the output look reasonable in Postgres, we\n * skip the flushing if the accumulated message does not end in a newline.\n */\nstatic void\nsapi_plphp_flush(void *sth)\n{\n\tif (currmsg != NULL)\n\t{\n\t\tAssert(currmsg->data != NULL);\n\n\t\tif (currmsg->data[currmsg->len - 1] == '\\n')\n\t\t{\n\t\t\t/*\n\t\t\t * remove the trailing newline because elog() inserts another\n\t\t\t * one\n\t\t\t */\n\t\t\tcurrmsg->data[currmsg->len - 1] = '\\0';\n\t\t}\n\t\telog(LOG, \"%s\", currmsg->data);\n\n\t\tpfree(currmsg->data);\n\t\tpfree(currmsg);\n\t\tcurrmsg = NULL;\n\t}\n\telse\n\t\telog(LOG, \"attempting to flush a NULL message\");\n}\n\nstatic int\nsapi_plphp_send_headers(sapi_headers_struct *sapi_headers TSRMLS_DC)\n{\n\treturn 1;\n}\n\nstatic void\nphp_plphp_log_messages(char *message)\n{\n\telog(LOG, \"plphp: %s\", message);\n}\n\n\nstatic sapi_module_struct plphp_sapi_module = {\n\t\"plphp\",\t\t\t\t\t/* name */\n\t\"PL/php PostgreSQL Handler\",/* pretty name */\n\n\tNULL,\t\t\t\t\t\t/* startup 
*/\n\tphp_module_shutdown_wrapper,/* shutdown */\n\n\tNULL,\t\t\t\t\t\t/* activate */\n\tNULL,\t\t\t\t\t\t/* deactivate */\n\n\tsapi_plphp_write,\t\t\t/* unbuffered write */\n\tsapi_plphp_flush,\t\t\t/* flush */\n\tNULL,\t\t\t\t\t\t/* stat */\n\tNULL,\t\t\t\t\t\t/* getenv */\n\n\tphp_error,\t\t\t\t\t/* sapi_error(int, const char *, ...) */\n\n\tNULL,\t\t\t\t\t\t/* header handler */\n\tsapi_plphp_send_headers,\t/* send headers */\n\tNULL,\t\t\t\t\t\t/* send header */\n\n\tNULL,\t\t\t\t\t\t/* read POST */\n\tNULL,\t\t\t\t\t\t/* read cookies */\n\n\tNULL,\t\t\t\t\t\t/* register server variables */\n\tphp_plphp_log_messages,\t\t/* log message */\n\n\tNULL,\t\t\t\t\t\t/* Block interrupts */\n\tNULL,\t\t\t\t\t\t/* Unblock interrupts */\n\tSTANDARD_SAPI_MODULE_PROPERTIES\n};\n\n/*\n * plphp_init_all()\t\t- Initialize all\n *\n * XXX This is called each time a function is invoked.\n */\nstatic void\nplphp_init_all(void)\n{\n\t/* Execute postmaster-startup safe initialization */\n\tif (plphp_first_call)\n\t\tplphp_init();\n\n\t/*\n\t * Any other initialization that must be done each time a new\n\t * backend starts -- currently none.\n\t */\n}\n\n/*\n * This function must not be static, so that it can be used in\n * preload_libraries. 
If it is, it will be called by postmaster;\n * otherwise it will be called by each backend the first time a\n * function is called.\n */\nvoid\nplphp_init(void)\n{\n\tTSRMLS_FETCH();\n\t/* Do initialization only once */\n\tif (!plphp_first_call)\n\t\treturn;\n\n\t/*\n\t * Need a Pg try/catch block to prevent an initialization-\n\t * failure from bringing the whole server down.\n\t */\n\tPG_TRY();\n\t{\n\t\tzend_try\n\t\t{\n\t\t\t/*\n\t\t\t * XXX This is a hack -- we are replacing the error callback in an\n\t\t\t * invasive manner that should not be expected to work on future PHP\n\t\t\t * releases.\n\t\t\t */\n\t\t\tzend_error_cb = plphp_error_cb;\n\n\t\t\t/* Omit HTML tags from output */\n\t\t\tplphp_sapi_module.phpinfo_as_text = 1;\n\t\t\tsapi_startup(&plphp_sapi_module);\n\n\t\t\tif (php_module_startup(&plphp_sapi_module, NULL, 0) == FAILURE)\n\t\t\t\telog(ERROR, \"php_module_startup call failed\");\n\n\t\t\t/* php_module_startup changed it, so put it back */\n\t\t\tzend_error_cb = plphp_error_cb;\n\n\t\t\t/*\n\t\t\t * FIXME -- Figure out what this comment is supposed to mean:\n\t\t\t *\n\t\t\t * There is no way to see if we must call zend_ini_deactivate()\n\t\t\t * since we cannot check if EG(ini_directives) has been initialised\n\t\t\t * because the executor's constructor does not initialize it.\n\t\t\t * Apart from that there seems no need for zend_ini_deactivate() yet.\n\t\t\t * So we error out.\n\t\t\t */\n\n\t\t\t/* Init procedure cache */\n\t\t\tMAKE_STD_ZVAL(plphp_proc_array);\n\t\t\tarray_init(plphp_proc_array);\n\n\t\t\tzend_register_functions(\n#if PHP_MAJOR_VERSION == 5\n\t\t\t\t\t\t\t\t\tNULL,\n#endif\n\t\t\t\t\t\t\t\t\tspi_functions, NULL,\n\t\t\t\t\t\t\t\t\tMODULE_PERSISTENT TSRMLS_CC);\n\n\t\t\tPG(during_request_startup) = true;\n\n\t\t\t/* Set some defaults */\n\t\t\tSG(options) |= SAPI_OPTION_NO_CHDIR;\n\n\t\t\t/* Hard coded defaults which cannot be overwritten in the ini file */\n\t\t\tINI_HARDCODED(\"register_argc_argv\", 
\"0\");\n\t\t\tINI_HARDCODED(\"html_errors\", \"0\");\n\t\t\tINI_HARDCODED(\"implicit_flush\", \"1\");\n\t\t\tINI_HARDCODED(\"max_execution_time\", \"0\");\n\t\t\tINI_HARDCODED(\"max_input_time\", \"-1\");\n\n\t\t\t/*\n\t\t\t * Set memory limit to ridiculously high value. This helps the\n\t\t\t * server not to crash, because the PHP allocator has the really\n\t\t\t * stupid idea of calling exit() if the limit is exceeded.\n\t\t\t */\n\t\t\t{\n\t\t\t\tchar\tlimit[15];\n\n\t\t\t\tsnprintf(limit, sizeof(limit), \"%d\", 1 << 30);\n\t\t\t\tINI_HARDCODED(\"memory_limit\", limit);\n\t\t\t}\n\n\t\t\t/* tell the engine we're in non-html mode */\n\t\t\tzend_uv.html_errors = false;\n\n\t\t\t/* not initialized but needed for several options */\n\t\t\tCG(in_compilation) = false;\n\n\t\t\tEG(uninitialized_zval_ptr) = NULL;\n\n\t\t\tif (php_request_startup(TSRMLS_C) == FAILURE)\n\t\t\t{\n\t\t\t\tSG(headers_sent) = 1;\n\t\t\t\tSG(request_info).no_headers = 1;\n\t\t\t\t/* Use Postgres log */\n\t\t\t\telog(ERROR, \"php_request_startup call failed\");\n\t\t\t}\n\n\t\t\tCG(interactive) = false;\n\t\t\tPG(during_request_startup) = true;\n\n\t\t\t/* Register the resource for SPI_result */\n\t\t\tSPIres_rtype = zend_register_list_destructors_ex(php_SPIresult_destroy,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t NULL, \n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"SPI result\",\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 0);\n\n\t\t\t/* Ok, we're done */\n\t\t\tplphp_first_call = false;\n\t\t}\n\t\tzend_catch\n\t\t{\n\t\t\tplphp_first_call = true;\n\t\t\tif (error_msg)\n\t\t\t{\n\t\t\t\tchar\tstr[1024];\n\n\t\t\t\tstrncpy(str, error_msg, sizeof(str));\n\t\t\t\tpfree(error_msg);\n\t\t\t\terror_msg = NULL;\n\t\t\t\telog(ERROR, \"fatal error during PL/php initialization: %s\",\n\t\t\t\t\t str);\n\t\t\t}\n\t\t\telse\n\t\t\t\telog(ERROR, \"fatal error during PL/php initialization\");\n\t\t}\n\t\tzend_end_try();\n\t}\n\tPG_CATCH();\n\t{\n\t\tPG_RE_THROW();\n\t}\n\tPG_END_TRY();\n}\n\n/*\n * plphp_call_handler\n *\n * The visible 
function of the PL interpreter. The PostgreSQL function manager\n * and trigger manager call this function for execution of php procedures.\n */\nDatum\nplphp_call_handler(PG_FUNCTION_ARGS)\n{\n\tDatum\t\tretval;\n\tTSRMLS_FETCH();\n\n\t/* Initialize interpreter */\n\tplphp_init_all();\n\n\tPG_TRY();\n\t{\n\t\t/* Connect to SPI manager */\n\t\tif (SPI_connect() != SPI_OK_CONNECT)\n\t\t\tereport(ERROR,\n\t\t\t\t\t(errcode(ERRCODE_CONNECTION_FAILURE),\n\t\t\t\t\t errmsg(\"could not connect to SPI manager\")));\n\n\t\tzend_try\n\t\t{\n\t\t\tplphp_proc_desc *desc;\n\n\t\t\t/* Clean up SRF state */\n\t\t\tcurrent_fcinfo = NULL;\n\n\t\t\t/* Redirect to the appropiate handler */\n\t\t\tif (CALLED_AS_TRIGGER(fcinfo))\n\t\t\t{\n\t\t\t\tdesc = plphp_compile_function(fcinfo->flinfo->fn_oid, true TSRMLS_CC);\n\n\t\t\t\t/* Activate PHP safe mode if needed */\n\t\t\t\tPG(safe_mode) = desc->trusted;\n\n\t\t\t\tretval = plphp_trigger_handler(fcinfo, desc TSRMLS_CC);\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\tdesc = plphp_compile_function(fcinfo->flinfo->fn_oid, false TSRMLS_CC);\n\n\t\t\t\t/* Activate PHP safe mode if needed */\n\t\t\t\tPG(safe_mode) = desc->trusted;\n\n\t\t\t\tif (desc->retset)\n\t\t\t\t\tretval = plphp_srf_handler(fcinfo, desc TSRMLS_CC);\n\t\t\t\telse\n\t\t\t\t\tretval = plphp_func_handler(fcinfo, desc TSRMLS_CC);\n\t\t\t}\n\t\t}\n\t\tzend_catch\n\t\t{\n\t\t\tREPORT_PHP_MEMUSAGE(\"reporting error\");\n\t\t\tif (error_msg)\n\t\t\t{\n\t\t\t\tchar\tstr[1024];\n\n\t\t\t\tstrncpy(str, error_msg, sizeof(str));\n\t\t\t\tpfree(error_msg);\n\t\t\t\terror_msg = NULL;\n\t\t\t\telog(ERROR, \"%s\", str);\n\t\t\t}\n\t\t\telse\n\t\t\t\telog(ERROR, \"fatal error\");\n\n\t\t\t/* not reached, but keep compiler quiet */\n\t\t\treturn 0;\n\t\t}\n\t\tzend_end_try();\n\t}\n\tPG_CATCH();\n\t{\n\t\tPG_RE_THROW();\n\t}\n\tPG_END_TRY();\n\n\treturn retval;\n}\n\n/*\n * plphp_validator\n *\n * \t\tValidator function for checking the function's syntax at creation\n * \t\ttime\n 
 */
Datum
plphp_validator(PG_FUNCTION_ARGS)
{
	Oid				funcoid = PG_GETARG_OID(0);
	Form_pg_proc	procForm;
	HeapTuple		procTup;
	char			tmpname[32];
	char			funcname[NAMEDATALEN];
	char		 *tmpsrc = NULL,
				 *prosrc;
	Datum			prosrcdatum;


	TSRMLS_FETCH();
	/* Initialize interpreter */
	plphp_init_all();

	PG_TRY();
	{
		bool			isnull;
		/* Grab the pg_proc tuple */
		procTup = SearchSysCache(PROCOID,
								 ObjectIdGetDatum(funcoid),
								 0, 0, 0);
		if (!HeapTupleIsValid(procTup))
			elog(ERROR, "cache lookup failed for function %u", funcoid);

		procForm = (Form_pg_proc) GETSTRUCT(procTup);

		/* Get the function source code */
		prosrcdatum = SysCacheGetAttr(PROCOID,
									  procTup,
									  Anum_pg_proc_prosrc,
									  &isnull);
		if (isnull)
			elog(ERROR, "cache lookup yielded NULL prosrc");
		prosrc = DatumGetCString(DirectFunctionCall1(textout,
													 prosrcdatum));

		/* Get the function name, for the error message */
		StrNCpy(funcname, NameStr(procForm->proname), NAMEDATALEN);

		/* Let go of the pg_proc tuple */
		ReleaseSysCache(procTup);

		/*
		 * Create a PHP function creation statement.  The palloc size covers
		 * the literal parts of the sprintf format below (the quoted template
		 * string is slightly longer than the format's fixed text, so it also
		 * accounts for the terminating NUL).
		 */
		snprintf(tmpname, sizeof(tmpname), "plphp_temp_%u", funcoid);
		tmpsrc = (char *) palloc(strlen(prosrc) +
								 strlen(tmpname) +
								 strlen("function ($args, $argc){ } "));
		sprintf(tmpsrc, "function %s($args, $argc){%s}",
				tmpname, prosrc);

		pfree(prosrc);

		zend_try
		{
			/*
			 * Delete the function from the PHP function table, just in case it
			 * already existed. This is quite unlikely, but still.
			 */
			zend_hash_del(CG(function_table), tmpname, strlen(tmpname) + 1);

			/*
			 * Let the user see the fireworks. If the function doesn't validate,
			 * the ERROR will be raised and the function will not be created.
			 */
			if (zend_eval_string(tmpsrc, NULL,
								 "plphp function temp source" TSRMLS_CC) == FAILURE)
				elog(ERROR, "function \"%s\" does not validate", funcname);

			pfree(tmpsrc);
			tmpsrc = NULL;

			/* Delete the newly-created function from the PHP function table. */
			zend_hash_del(CG(function_table), tmpname, strlen(tmpname) + 1);
		}
		zend_catch
		{
			if (tmpsrc != NULL)
				pfree(tmpsrc);

			if (error_msg)
			{
				char	str[1024];

				StrNCpy(str, error_msg, sizeof(str));
				pfree(error_msg);
				error_msg = NULL;
				elog(ERROR, "function \"%s\" does not validate: %s", funcname, str);
			}
			else
				elog(ERROR, "fatal error");

			/* not reached, but keep compiler quiet */
			return 0;
		}
		zend_end_try();

		/* The result of a validator is ignored */
		PG_RETURN_VOID();
	}
	PG_CATCH();
	{
		PG_RE_THROW();
	}
	PG_END_TRY();
}

/*
 * plphp_get_function_tupdesc
 *
 * 		Returns a TupleDesc of the function's return type.
 */
static TupleDesc
plphp_get_function_tupdesc(Oid result_type, Node *rsinfo)
{
	if (result_type == RECORDOID)
	{
		ReturnSetInfo *rs = (ReturnSetInfo *) rsinfo;
		/* We must get the information from call context */
		if (!rsinfo || !IsA(rsinfo, ReturnSetInfo) || rs->expectedDesc == NULL)
			ereport(ERROR,
					(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
					 errmsg("function returning record called in context "
							"that cannot accept type record")));
		return rs->expectedDesc;
	}
	else
		/* ordinary composite type */
		return lookup_rowtype_tupdesc(result_type, -1);
}


/*
 * Build the $_TD array for the trigger function.
 *
 * Returns a freshly allocated PHP array zval; the caller is responsible for
 * destroying it (plphp_trigger_handler does zval_dtor + FREE_ZVAL).
 */
static zval *
plphp_trig_build_args(FunctionCallInfo fcinfo)
{
	TriggerData	 *tdata;
	TupleDesc		tupdesc;
	zval		 *retval;
	int				i;

	MAKE_STD_ZVAL(retval);
	array_init(retval);

	tdata = (TriggerData *) fcinfo->context;
	tupdesc = tdata->tg_relation->rd_att;

	/* The basic variables */
	add_assoc_string(retval, "name", tdata->tg_trigger->tgname, 1);
	add_assoc_long(retval, "relid", tdata->tg_relation->rd_id);
	add_assoc_string(retval, "relname", SPI_getrelname(tdata->tg_relation), 1);
	add_assoc_string(retval, "schemaname", SPI_getnspname(tdata->tg_relation), 1);

	/* EVENT */
	if (TRIGGER_FIRED_BY_INSERT(tdata->tg_event))
		add_assoc_string(retval, "event", "INSERT", 1);
	else if (TRIGGER_FIRED_BY_DELETE(tdata->tg_event))
		add_assoc_string(retval, "event", "DELETE", 1);
	else if (TRIGGER_FIRED_BY_UPDATE(tdata->tg_event))
		add_assoc_string(retval, "event", "UPDATE", 1);
	else
		elog(ERROR, "unknown firing event for trigger function");

	/* NEW and OLD as appropriate */
	if (TRIGGER_FIRED_FOR_ROW(tdata->tg_event))
	{
		if (TRIGGER_FIRED_BY_INSERT(tdata->tg_event))
		{
			zval	 *hashref;

			hashref = plphp_build_tuple_argument(tdata->tg_trigtuple, tupdesc);
			add_assoc_zval(retval, "new", hashref);
		}
		else if (TRIGGER_FIRED_BY_DELETE(tdata->tg_event))
		{
			zval	 *hashref;

			hashref = plphp_build_tuple_argument(tdata->tg_trigtuple, tupdesc);
			add_assoc_zval(retval, "old", hashref);
		}
		else if (TRIGGER_FIRED_BY_UPDATE(tdata->tg_event))
		{
			zval	 *hashref;

			/* For UPDATE: tg_newtuple is NEW, tg_trigtuple is OLD */
			hashref = plphp_build_tuple_argument(tdata->tg_newtuple, tupdesc);
			add_assoc_zval(retval, "new", hashref);

			hashref = plphp_build_tuple_argument(tdata->tg_trigtuple, tupdesc);
			add_assoc_zval(retval, "old", hashref);
		}
		else
			elog(ERROR, "unknown firing event for trigger function");
	}

	/* ARGC and ARGS */
	add_assoc_long(retval, "argc", tdata->tg_trigger->tgnargs);

	if (tdata->tg_trigger->tgnargs > 0)
	{
		zval	 *hashref;

		MAKE_STD_ZVAL(hashref);
		array_init(hashref);

		for (i = 0; i < tdata->tg_trigger->tgnargs; i++)
			add_index_string(hashref, i, tdata->tg_trigger->tgargs[i], 1);

		zend_hash_update(retval->value.ht, "args", strlen("args") + 1,
						 (void *) &hashref, sizeof(zval *), NULL);
	}

	/* WHEN */
	if (TRIGGER_FIRED_BEFORE(tdata->tg_event))
		add_assoc_string(retval, "when", "BEFORE", 1);
	else if (TRIGGER_FIRED_AFTER(tdata->tg_event))
		add_assoc_string(retval, "when", "AFTER", 1);
	else
		elog(ERROR, "unknown firing time for trigger function");

	/* LEVEL */
	if (TRIGGER_FIRED_FOR_ROW(tdata->tg_event))
		add_assoc_string(retval, "level", "ROW", 1);
	else if (TRIGGER_FIRED_FOR_STATEMENT(tdata->tg_event))
		add_assoc_string(retval, "level", "STATEMENT", 1);
	else
		elog(ERROR, "unknown firing level for trigger function");

	return retval;
}

/*
 * plphp_trigger_handler
 * 		Handler for trigger function calls
 */
static Datum
plphp_trigger_handler(FunctionCallInfo fcinfo, plphp_proc_desc *desc TSRMLS_DC)
{
	Datum		retval = 0;
	char	 *srv;
	zval	 *phpret,
			 *zTrigData;
	TriggerData *trigdata;

	REPORT_PHP_MEMUSAGE("going to build the trigger arg");

	zTrigData = plphp_trig_build_args(fcinfo);

	REPORT_PHP_MEMUSAGE("going to call the trigger function");

	phpret = plphp_call_php_trig(desc, fcinfo, zTrigData TSRMLS_CC);
	if (!phpret)
		elog(ERROR, "error during execution of function %s", desc->proname);

	REPORT_PHP_MEMUSAGE("trigger called, going to build the return value");

	/*
	 * Disconnect from SPI manager and then create the return values datum (if
	 * the input function does a palloc for it this must not be allocated in
	 * the SPI memory context because SPI_finish would free it).
	 */
	if (SPI_finish() !=
SPI_OK_FINISH)\n\t\tereport(ERROR,\n\t\t\t\t(errcode(ERRCODE_CONNECTION_DOES_NOT_EXIST),\n\t\t\t\t errmsg(\"could not disconnect from SPI manager\")));\n\n\ttrigdata = (TriggerData *) fcinfo->context;\n\n\tif (zTrigData->type != IS_ARRAY)\n\t\telog(ERROR, \"$_TD is not an array\");\n\t\t\t \n\t/*\n\t * In a BEFORE trigger, compute the return value. In an AFTER trigger\n\t * it'll be ignored, so don't bother.\n\t */\n\tif (TRIGGER_FIRED_BEFORE(trigdata->tg_event))\n\t{\n\t\tswitch (phpret->type)\n\t\t{\n\t\t\tcase IS_STRING:\n\t\t\t\tsrv = phpret->value.str.val;\n\t\t\t\tif (strcasecmp(srv, \"SKIP\") == 0)\n\t\t\t\t{\n\t\t\t\t\t/* do nothing */\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t\telse if (strcasecmp(srv, \"MODIFY\") == 0)\n\t\t\t\t{\n\t\t\t\t\tif (TRIGGER_FIRED_BY_INSERT(trigdata->tg_event) ||\n\t\t\t\t\t\tTRIGGER_FIRED_BY_UPDATE(trigdata->tg_event))\n\t\t\t\t\t\tretval = PointerGetDatum(plphp_modify_tuple(zTrigData,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttrigdata));\n\t\t\t\t\telse if (TRIGGER_FIRED_BY_DELETE(trigdata->tg_event))\n\t\t\t\t\t\tereport(ERROR,\n\t\t\t\t\t\t\t\t(errcode(ERRCODE_SYNTAX_ERROR),\n\t\t\t\t\t\t\t\t errmsg(\"on delete trigger can not modify the the return tuple\")));\n\t\t\t\t\telse\n\t\t\t\t\t\telog(ERROR, \"unknown event in trigger function\");\n\t\t\t\t}\n\t\t\t\telse\n\t\t\t\t\tereport(ERROR,\n\t\t\t\t\t\t\t(errcode(ERRCODE_SYNTAX_ERROR),\n\t\t\t\t\t\t\t errmsg(\"expected trigger function to return NULL, 'SKIP' or 'MODIFY'\")));\n\t\t\t\tbreak;\n\t\t\tcase IS_NULL:\n\t\t\t\tif (TRIGGER_FIRED_BY_INSERT(trigdata->tg_event) ||\n\t\t\t\t\tTRIGGER_FIRED_BY_DELETE(trigdata->tg_event))\n\t\t\t\t\tretval = (Datum) trigdata->tg_trigtuple;\n\t\t\t\telse if (TRIGGER_FIRED_BY_UPDATE(trigdata->tg_event))\n\t\t\t\t\tretval = (Datum) trigdata->tg_newtuple;\n\t\t\t\tbreak;\n\t\t\tdefault:\n\t\t\t\tereport(ERROR,\n\t\t\t\t\t\t(errcode(ERRCODE_SYNTAX_ERROR),\n\t\t\t\t\t\t errmsg(\"expected trigger function to return NULL, 'SKIP' or 
'MODIFY'\")));\n\t\t\t\tbreak;\n\t\t}\n\t}\n\n\tREPORT_PHP_MEMUSAGE(\"freeing some variables\");\n\n\tzval_dtor(zTrigData);\n\tzval_dtor(phpret);\n\n\tFREE_ZVAL(phpret);\n\tFREE_ZVAL(zTrigData);\n\n\tREPORT_PHP_MEMUSAGE(\"trigger call done\");\n\n\treturn retval;\n}\n\n/*\n * plphp_func_handler\n * \t\tHandler for regular function calls\n */\nstatic Datum\nplphp_func_handler(FunctionCallInfo fcinfo, plphp_proc_desc *desc TSRMLS_DC)\n{\n\tzval\t *phpret = NULL;\n\tDatum\t\tretval;\n\tchar\t *retvalbuffer = NULL;\n\n\t/* SRFs are handled separately */\n\tAssert(!desc->retset);\n\n\t/* Call the PHP function. */\n\tphpret = plphp_call_php_func(desc, fcinfo TSRMLS_CC);\n\tif (!phpret)\n\t\telog(ERROR, \"error during execution of function %s\", desc->proname);\n\n\tREPORT_PHP_MEMUSAGE(\"function invoked\");\n\n\t/* Basic datatype checks */\n\tif ((desc->ret_type & PL_ARRAY) && phpret->type != IS_ARRAY)\n\t\tereport(ERROR,\n\t\t\t\t(errcode(ERRCODE_DATATYPE_MISMATCH),\n\t\t\t\t errmsg(\"function declared to return array must return an array\")));\n\tif ((desc->ret_type & PL_TUPLE) && phpret->type != IS_ARRAY)\n\t\tereport(ERROR,\n\t\t\t\t(errcode(ERRCODE_DATATYPE_MISMATCH),\n\t\t\t\t errmsg(\"function declared to return tuple must return an array\")));\n\n\t/*\n\t * Disconnect from SPI manager and then create the return values datum (if\n\t * the input function does a palloc for it this must not be allocated in\n\t * the SPI memory context because SPI_finish would free it).\n\t */\n\tif (SPI_finish() != SPI_OK_FINISH)\n\t\tereport(ERROR,\n\t\t\t\t(errcode(ERRCODE_CONNECTION_DOES_NOT_EXIST),\n\t\t\t\t errmsg(\"could not disconnect from SPI manager\")));\n\tretval = (Datum) 0;\n\n\tif (desc->ret_type & PL_PSEUDO)\n\t{\n\t\tHeapTuple\tretTypeTup;\n\t\tForm_pg_type retTypeStruct;\n\n\t\tretTypeTup = SearchSysCache(TYPEOID,\n\t\t\t\t\t\t\t\t\tObjectIdGetDatum(get_fn_expr_rettype(fcinfo->flinfo)),\n\t\t\t\t\t\t\t\t\t0, 0, 0);\n\t\tretTypeStruct = (Form_pg_type) 
GETSTRUCT(retTypeTup);
		perm_fmgr_info(retTypeStruct->typinput, &(desc->result_in_func));
		desc->result_typioparam = retTypeStruct->typelem;
		ReleaseSysCache(retTypeTup);
	}

	if (phpret)
	{
		switch (Z_TYPE_P(phpret))
		{
			case IS_NULL:
				fcinfo->isnull = true;
				break;
			case IS_BOOL:
			case IS_DOUBLE:
			case IS_LONG:
			case IS_STRING:
				/* scalar: convert through the type's input function below */
				retvalbuffer = plphp_zval_get_cstring(phpret, false, false);
				retval = CStringGetDatum(retvalbuffer);
				break;
			case IS_ARRAY:
				if (desc->ret_type & PL_ARRAY)
				{
					retvalbuffer = plphp_convert_to_pg_array(phpret);
					retval = CStringGetDatum(retvalbuffer);
				}
				else if (desc->ret_type & PL_TUPLE)
				{
					TupleDesc	td;
					HeapTuple	tup;

					if (desc->ret_type & PL_PSEUDO)
						td = plphp_get_function_tupdesc(desc->ret_oid,
														fcinfo->resultinfo);
					else
						td = lookup_rowtype_tupdesc(desc->ret_oid, (int32) -1);

					if (!td)
						elog(ERROR, "no TupleDesc info available");

					tup = plphp_htup_from_zval(phpret, td);
					retval = HeapTupleGetDatum(tup);
					ReleaseTupleDesc(td);
				}
				else
					/* FIXME -- should return the thing as a string? */
					elog(ERROR, "this plphp function cannot return arrays");
				break;
			default:
				elog(WARNING,
					 "plphp functions cannot return type %i",
					 phpret->type);
				fcinfo->isnull = true;
				break;
		}
	}
	else
	{
		fcinfo->isnull = true;
		retval = (Datum) 0;
	}

	/*
	 * For non-tuple results, run the C-string representation through the
	 * return type's input function to build the final Datum.
	 */
	if (!fcinfo->isnull && !(desc->ret_type & PL_TUPLE))
	{
		retval = FunctionCall3(&desc->result_in_func,
							   PointerGetDatum(retvalbuffer),
							   ObjectIdGetDatum(desc->result_typioparam),
							   Int32GetDatum(-1));
		pfree(retvalbuffer);
	}

	REPORT_PHP_MEMUSAGE("finished calling user function");

	return retval;
}

/*
 * plphp_srf_handler
 * 		Invoke a SRF
 */
static Datum
plphp_srf_handler(FunctionCallInfo fcinfo, plphp_proc_desc *desc TSRMLS_DC)
{
	ReturnSetInfo *rsi = (ReturnSetInfo *) fcinfo->resultinfo;
	TupleDesc	tupdesc;
	zval	 *phpret;
	MemoryContext	oldcxt;

	Assert(desc->retset);

	/* module-level state consumed by return_next */
	current_fcinfo = fcinfo;
	current_tuplestore = NULL;

	/* Check context before allowing the call to go through */
	if (!rsi || !IsA(rsi, ReturnSetInfo) ||
		(rsi->allowedModes & SFRM_Materialize) == 0 ||
		rsi->expectedDesc == NULL)
		ereport(ERROR,
				(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
				 errmsg("set-valued function called in context that "
						"cannot accept a set")));

	/*
	 * Fetch the function's tuple descriptor. This will return NULL in the
	 * case of a scalar return type, in which case we will copy the TupleDesc
	 * from the ReturnSetInfo.
	 */
	get_call_result_type(fcinfo, NULL, &tupdesc);
	if (tupdesc == NULL)
		tupdesc = rsi->expectedDesc;

	/*
	 * If the expectedDesc is NULL, bail out, because most likely it's using
	 * IN/OUT parameters.
	 */
	if (tupdesc == NULL)
		ereport(ERROR,
				(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
				 errmsg("cannot use IN/OUT parameters in PL/php")));

	oldcxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory);

	/* This context is reset once per row in return_next */
	current_memcxt = AllocSetContextCreate(CurTransactionContext,
										   "PL/php SRF context",
										   ALLOCSET_DEFAULT_MINSIZE,
										   ALLOCSET_DEFAULT_INITSIZE,
										   ALLOCSET_DEFAULT_MAXSIZE);

	/* Tuple descriptor and AttInMetadata for return_next */
	current_tupledesc = CreateTupleDescCopy(tupdesc);
	current_attinmeta = TupleDescGetAttInMetadata(current_tupledesc);

	/*
	 * Call the PHP function. The user code must call return_next, which will
	 * create and populate the tuplestore appropriately.
	 */
	phpret = plphp_call_php_func(desc, fcinfo TSRMLS_CC);

	/* We don't use the return value */
	zval_dtor(phpret);

	/* Close the SPI connection */
	if (SPI_finish() != SPI_OK_FINISH)
		ereport(ERROR,
				(errcode(ERRCODE_CONNECTION_DOES_NOT_EXIST),
				 errmsg("could not disconnect from SPI manager")));

	/* Now prepare the return values. */
	rsi->returnMode = SFRM_Materialize;

	if (current_tuplestore)
	{
		rsi->setResult = current_tuplestore;
		rsi->setDesc = current_tupledesc;
	}

	/* Reset SRF module-level state */
	MemoryContextDelete(current_memcxt);
	current_memcxt = NULL;
	current_tupledesc = NULL;
	current_attinmeta = NULL;

	MemoryContextSwitchTo(oldcxt);

	/* All done */
	return (Datum) 0;
}

/*
 * plphp_compile_function
 *
 * 		Compile (or hopefully just look up) function
 */
static plphp_proc_desc *
plphp_compile_function(Oid fnoid, bool is_trigger TSRMLS_DC)
{
	HeapTuple	procTup;
	Form_pg_proc procStruct;
	char		internal_proname[64];
	plphp_proc_desc *prodesc = NULL;
	int			i;
	char	 *pointer = NULL;

	/*
	 * We'll need the pg_proc tuple in any case... 
	 */
	procTup = SearchSysCache(PROCOID, ObjectIdGetDatum(fnoid), 0, 0, 0);
	if (!HeapTupleIsValid(procTup))
		elog(ERROR, "cache lookup failed for function %u", fnoid);
	procStruct = (Form_pg_proc) GETSTRUCT(procTup);

	/*
	 * Build our internal procedure name from the function's Oid
	 */
	if (is_trigger)
		snprintf(internal_proname, sizeof(internal_proname),
				 "plphp_proc_%u_trigger", fnoid);
	else
		snprintf(internal_proname, sizeof(internal_proname),
				 "plphp_proc_%u", fnoid);

	/*
	 * Look up the internal proc name in the hashtable.  The cache stores the
	 * prodesc pointer formatted as a "%p" string; see the add_assoc_string
	 * call further down.
	 */
	pointer = plphp_zval_get_cstring(plphp_array_get_elem(plphp_proc_array,
														  internal_proname),
									 false, true);
	if (pointer)
	{
		bool uptodate;
		sscanf(pointer, "%p", &prodesc);

#ifdef PG_VERSION_83_COMPAT
		/* PostgreSQL 8.3 doesn't allow calling GetCmin if a tuple doesn't
		 * originate from the current transaction.
		 */
		uptodate =
			(prodesc->fn_xmin == HeapTupleHeaderGetXmin(procTup->t_data) &&
			 prodesc->fn_cmin == HeapTupleHeaderGetRawCommandId(procTup->t_data));

#else
		uptodate =
			(prodesc->fn_xmin ==
HeapTupleHeaderGetXmin(procTup->t_data) &&
			 prodesc->fn_cmin == HeapTupleHeaderGetCmin(procTup->t_data));

#endif


		/* We need to delete the old entry */
		if (!uptodate)
		{
			/*
			 * FIXME -- use a per-function memory context and fix this
			 * stuff for good
			 */
			free(prodesc->proname);
			free(prodesc);
			prodesc = NULL;
		}
	}

	if (prodesc == NULL)
	{
		HeapTuple	langTup;
		Form_pg_language langStruct;
		Datum		prosrcdatum;
		bool		isnull;
		char	 *proc_source;
		char	 *complete_proc_source;
		char	 *pointer = NULL;
		char	 *aliases = NULL;
		char	 *out_aliases = NULL;
		char	 *out_return_str = NULL;
		int16	typlen;
		char	typbyval,
				typalign,
				typtype,
				typdelim;
		Oid		typioparam,
				typinput,
				typoutput;
		/*
		 * Allocate a new procedure description block.  malloc (not palloc)
		 * because the descriptor must outlive the current memory context.
		 */
		prodesc = (plphp_proc_desc *) malloc(sizeof(plphp_proc_desc));
		if (!prodesc)
			ereport(ERROR,
					(errcode(ERRCODE_OUT_OF_MEMORY),
					 errmsg("out of memory")));

		MemSet(prodesc, 0, sizeof(plphp_proc_desc));
		prodesc->proname = strdup(internal_proname);
		if (!prodesc->proname)
		{
			free(prodesc);
			ereport(ERROR,
					(errcode(ERRCODE_OUT_OF_MEMORY),
					 errmsg("out of memory")));
		}

		prodesc->fn_xmin = HeapTupleHeaderGetXmin(procTup->t_data);

#ifdef PG_VERSION_83_COMPAT
		/* PostgreSQL 8.3 doesn't allow calling GetCmin if a tuple doesn't
		 * originate from the current transaction.
		 */
		prodesc->fn_cmin = HeapTupleHeaderGetRawCommandId(procTup->t_data);

#else
		prodesc->fn_cmin = HeapTupleHeaderGetCmin(procTup->t_data);

#endif

		/*
		 * Look up the pg_language tuple by Oid
		 */
		langTup = SearchSysCache(LANGOID,
								 ObjectIdGetDatum(procStruct->prolang),
								 0, 0, 0);
		if (!HeapTupleIsValid(langTup))
		{
			free(prodesc->proname);
			free(prodesc);
			elog(ERROR, "cache lookup failed for language %u",
					 procStruct->prolang);
		}
		langStruct = (Form_pg_language) GETSTRUCT(langTup);
		prodesc->trusted = langStruct->lanpltrusted;
		ReleaseSysCache(langTup);

		/*
		 * Get the required information for input conversion of the return
		 * value, and output conversion of the procedure's arguments.
		 */
		if (!is_trigger)
		{
			char **argnames;
			char *argmodes;
			Oid *argtypes;
			int32	alias_str_end,
					out_str_end;

			typtype = get_typtype(procStruct->prorettype);
			get_type_io_data(procStruct->prorettype,
							 IOFunc_input,
							 &typlen,
							 &typbyval,
							 &typalign,
							 &typdelim,
							 &typioparam,
							 &typinput);

			/*
			 * Disallow pseudotype result, except:
			 * VOID, RECORD, ANYELEMENT or ANYARRAY
			 */
			if (typtype == TYPTYPE_PSEUDO)
			{
				if ((procStruct->prorettype == VOIDOID) ||
					(procStruct->prorettype == RECORDOID) ||
					(procStruct->prorettype == ANYELEMENTOID) ||
					(procStruct->prorettype == ANYARRAYOID))
				{
					/* okay */
					prodesc->ret_type |= PL_PSEUDO;
				}
				else if (procStruct->prorettype == TRIGGEROID)
				{
					free(prodesc->proname);
					free(prodesc);
					ereport(ERROR,
							(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
							 errmsg("trigger functions may only be called "
									"as triggers")));
				}
				else
				{
					free(prodesc->proname);
					free(prodesc);
					ereport(ERROR,
							(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
							 errmsg("plphp functions cannot return type %s",
									format_type_be(procStruct->prorettype))));
				}
			}

			prodesc->ret_oid = procStruct->prorettype;
			prodesc->retset = procStruct->proretset;

			if (typtype == TYPTYPE_COMPOSITE ||
				procStruct->prorettype == RECORDOID)
			{
				prodesc->ret_type |= PL_TUPLE;
			}

			if (procStruct->prorettype == ANYARRAYOID)
				prodesc->ret_type |= PL_ARRAY;
			else
			{
				/* function returns a normal (declared) array */
				if (typlen == -1 && get_element_type(procStruct->prorettype))
					prodesc->ret_type |= PL_ARRAY;
			}

			perm_fmgr_info(typinput, &(prodesc->result_in_func));
			prodesc->result_typioparam = typioparam;

			/* Deal with named arguments, OUT, IN/OUT and TABLE arguments */

			prodesc->n_total_args = get_func_arg_info(procTup, &argtypes, 
											 		  &argnames, &argmodes);
			prodesc->n_out_args = 0;
			prodesc->n_mixed_args = 0;
			
			prodesc->args_out_tupdesc = NULL;
			out_return_str = NULL;
			alias_str_end = out_str_end = 0;

			/* Count the number of OUT arguments. Need to do this out of the
			 * main loop, to correctly determine the object to return for OUT args
		 */
			if (argmodes)
				for (i = 0; i < prodesc->n_total_args; i++)
				{
					switch(argmodes[i])
					{
						case PROARGMODE_OUT: 
							prodesc->n_out_args++;
							break;
						case PROARGMODE_INOUT: 
							prodesc->n_mixed_args++;
							break;
						case PROARGMODE_IN:
							break;
						case PROARGMODE_TABLE:
							break;
						case PROARGMODE_VARIADIC:
							/* no break needed: elog(ERROR) does not return */
							elog(ERROR, "VARIADIC arguments are not supported");
						default:
							elog(ERROR, "Unsupported type %c for argument no %d",
								 argmodes[i], i);
					}					
					prodesc->arg_argmode[i] = argmodes[i];
				}
			else
				MemSet(prodesc->arg_argmode, PROARGMODE_IN,
				 	   prodesc->n_total_args);

			/* Allocate memory for argument names unless all of them are OUT */
			if (argnames && prodesc->n_total_args > 0)
				aliases = palloc((NAMEDATALEN + 32) * prodesc->n_total_args);
			
			/* Main argument processing loop.
 */
			for (i = 0; i < prodesc->n_total_args; i++)
			{
				prodesc->arg_typtype[i] = get_typtype(argtypes[i]);
				if (prodesc->arg_typtype[i] != TYPTYPE_COMPOSITE)
				{							
					get_type_io_data(argtypes[i],
									 IOFunc_output,
									 &typlen,
									 &typbyval,
									 &typalign,
									 &typdelim,
									 &typioparam,
									 &typoutput);
					perm_fmgr_info(typoutput, &(prodesc->arg_out_func[i]));
					prodesc->arg_typioparam[i] = typioparam;
				}
				if (aliases && argnames[i][0] != '\0')
				{
					if (!is_valid_php_identifier(argnames[i]))
						elog(ERROR, "\"%s\" can not be used as a PHP variable name",
							 argnames[i]);
					/* Deal with argument name: alias $name to $args[i] */
					alias_str_end += snprintf(aliases + alias_str_end,
										 	  NAMEDATALEN + 32,
								 		 	  " $%s = &$args[%d];", 
											  argnames[i], i);
				}
				if ((prodesc->arg_argmode[i] == PROARGMODE_OUT ||
					 prodesc->arg_argmode[i] == PROARGMODE_INOUT) && !prodesc->retset)
				{
					/* Initialization for OUT arguments aliases */
					if (!out_return_str)
					{
						/* Generate return statement for a single OUT argument */
						out_return_str = palloc(NAMEDATALEN + 32);
						if (prodesc->n_out_args + prodesc->n_mixed_args == 1)
							snprintf(out_return_str, NAMEDATALEN + 32,
									 "return $args[%d];", i);
						else
						{
							/* PL/PHP deals with multiple OUT arguments by
							 * internally creating an array of references to them.
							 * E.g. out_fn(a out integer, b out integer )
							 * translates into:
							 * $_plphp_ret_out_fn_1234=array(a => $&a,b => $&b);
							 */
							char plphp_ret_array_name[NAMEDATALEN + 16];

							int array_namelen = snprintf(plphp_ret_array_name,
							 							 NAMEDATALEN + 16,
								 						 "_plphp_ret_%s",
													 	 internal_proname);

							snprintf(out_return_str, array_namelen + 16,
									"return $%s;", plphp_ret_array_name);
									
							/* 2 NAMEDATALEN for argument names, additional
							 * 16 bytes per each argument for assignment string,
							 * additional 16 bytes for the 'array' prefix string.
							 */		
							out_aliases = palloc(array_namelen +
												 (prodesc->n_out_args + 
												 prodesc->n_mixed_args) *
												 (2*NAMEDATALEN + 16) + 16);
												
							out_str_end = snprintf(out_aliases,
							 					   array_namelen +
												   (2 * NAMEDATALEN + 16) + 16,
												   "$%s = array(&$args[%d]", 
												   plphp_ret_array_name, i);
												   
						}
					} 
					else if (out_aliases)
					{
					 /* Add new elements to the array of aliases for OUT args */
						Assert(prodesc->n_out_args + prodesc->n_mixed_args > 1);
						out_str_end += snprintf(out_aliases+out_str_end,
												2 * NAMEDATALEN + 16,
												",&$args[%d]", i);
					}
				}
			}
			if (aliases)
				strcat(aliases, " ");
			if (out_aliases)
				strcat(out_aliases, ")");
		}

		/*
		 * Create the text of the PHP function. We do not use the same
		 * function name, because that would prevent function overloading.
		 * Sadly this also prevents PL/php functions from calling each other
		 * easily.
		 */
		prosrcdatum = SysCacheGetAttr(PROCOID, procTup,
									  Anum_pg_proc_prosrc, &isnull);
		if (isnull)
			elog(ERROR, "cache lookup yielded NULL prosrc");

		proc_source = DatumGetCString(DirectFunctionCall1(textout,
														  prosrcdatum));

		/* Create the procedure in the interpreter */
		complete_proc_source =
			(char *) palloc(strlen(proc_source) +
							strlen(internal_proname) +
							(aliases ? strlen(aliases) : 0) + 
							(out_aliases ? strlen(out_aliases) : 0) +
							strlen("function ($args, $argc){ } ") + 32 +
							(out_return_str ? strlen(out_return_str) : 0));

		/* XXX Is this usage of sprintf safe? */
		if (is_trigger)
			sprintf(complete_proc_source, "function %s($_TD){%s}",
					internal_proname, proc_source);
		else
			sprintf(complete_proc_source, 
					"function %s($args, $argc){%s %s;%s; %s}",
					internal_proname, 
					aliases ? aliases : "",
					out_aliases ? out_aliases : "",
					proc_source, 
					out_return_str? out_return_str : "");
					
		elog(LOG, "complete_proc_source = %s",
				 	 complete_proc_source);
				
		/* Drop any stale definition before (re)compiling */
		zend_hash_del(CG(function_table), prodesc->proname,
					  strlen(prodesc->proname) + 1);

		/* Cache the descriptor pointer, formatted as a "%p" string */
		pointer = (char *) palloc(64);
		sprintf(pointer, "%p", (void *) prodesc);
		add_assoc_string(plphp_proc_array, internal_proname,
						 (char *) pointer, 1);

		if (zend_eval_string(complete_proc_source, NULL,
							 "plphp function source" TSRMLS_CC) == FAILURE)
		{
			/* the next compilation will blow it up */
			prodesc->fn_xmin = InvalidTransactionId;
			elog(ERROR, "unable to compile function \"%s\"",
					 prodesc->proname);
		}

		if (aliases)
			pfree(aliases);
		if (out_aliases)
			pfree(out_aliases);
		if (out_return_str)
			pfree(out_return_str);
		pfree(complete_proc_source);
	}

	ReleaseSysCache(procTup);

	return prodesc;
}

/*
 * plphp_func_build_args
 * 		Build a PHP array representing the arguments to the function
 */
static zval *
plphp_func_build_args(plphp_proc_desc *desc, FunctionCallInfo fcinfo TSRMLS_DC)
{
	zval	 *retval;
	int			i,j;

	MAKE_STD_ZVAL(retval);
	array_init(retval);

	/* 
	 * The first var iterates over every argument, the second one - over the 
	 * IN or INOUT ones only
	 */
	for (i = 0, j = 0; i < desc->n_total_args; 
		 (j = IS_ARGMODE_OUT(desc->arg_argmode[i]) ? j : j + 1), i++)
	{
		/* Assign NULLs to OUT or TABLE arguments initially */
		if (IS_ARGMODE_OUT(desc->arg_argmode[i]))
		{
			add_next_index_unset(retval);
			continue;
		}

		if (desc->arg_typtype[i] == TYPTYPE_PSEUDO)
		{
			HeapTuple	typeTup;
			Form_pg_type typeStruct;

			/* Resolve the concrete argument type at call time */
			typeTup = SearchSysCache(TYPEOID,
									 ObjectIdGetDatum(get_fn_expr_argtype
													  (fcinfo->flinfo, j)),
									 0, 0, 0);
			typeStruct = (Form_pg_type) GETSTRUCT(typeTup);
			perm_fmgr_info(typeStruct->typoutput,
						   &(desc->arg_out_func[i]));
			desc->arg_typioparam[i] = typeStruct->typelem;
			ReleaseSysCache(typeTup);
		}

		if (desc->arg_typtype[i] == TYPTYPE_COMPOSITE)
		{
			if (fcinfo->argnull[j])
				add_next_index_unset(retval);
			else
			{
				HeapTupleHeader	td;
				Oid				tupType;
				int32			tupTypmod;
				TupleDesc		tupdesc;
				HeapTupleData	tmptup;
				zval		 *hashref;

				td = DatumGetHeapTupleHeader(fcinfo->arg[j]);

				/* Build a temporary HeapTuple control structure */
				tmptup.t_len = HeapTupleHeaderGetDatumLength(td);
				tmptup.t_data = DatumGetHeapTupleHeader(fcinfo->arg[j]);

				/* Extract rowtype info and find a tupdesc */
				tupType = HeapTupleHeaderGetTypeId(tmptup.t_data);
				tupTypmod = HeapTupleHeaderGetTypMod(tmptup.t_data);
				tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);

				/* Build the PHP hash */
				hashref = plphp_build_tuple_argument(&tmptup, tupdesc);
				zend_hash_next_index_insert(retval->value.ht,
											(void *) &hashref,
											sizeof(zval *), NULL);
				/* Finally release the acquired tupledesc */
				ReleaseTupleDesc(tupdesc);
			}
		}
		else
		{
			if (fcinfo->argnull[j])
				add_next_index_unset(retval);
			else
			{
				char	 *tmp;

				/*
				 * TODO room for improvement here: instead of going through the
				 * output function, figure out if we can just use the native
				 * representation to pass to PHP.
				 */

				tmp =
					DatumGetCString(FunctionCall3
									(&(desc->arg_out_func[i]),
									 fcinfo->arg[j],
									 ObjectIdGetDatum(desc->arg_typioparam[i]),
									 Int32GetDatum(-1)));
				/*
				 * FIXME -- this is bogus. Not every value starting with { is
				 * an array. Figure out a better method for detecting arrays.
				 */
				if (tmp[0] == '{')
				{
					zval	 *hashref;

					hashref = plphp_convert_from_pg_array(tmp TSRMLS_CC);
					zend_hash_next_index_insert(retval->value.ht,
												(void *) &hashref,
												sizeof(zval *), NULL);
				}
				else
					add_next_index_string(retval, tmp, 1);

				/*
				 * FIXME - figure out which parameters are passed by
				 * reference and need freeing
				 */
				/* pfree(tmp); */
			}
		}
	}

	return retval;
}

/*
 * plphp_call_php_func
 * 		Build the function argument array and call the PHP function.
 *
 * We use a private PHP symbol table, so that we can easily destroy everything
 * used during the execution of the function. We use it to collect the
 * arguments' zvals as well. 
We exclude the return value, because it will be\n * used by the caller -- it must be freed there!\n */\nstatic zval *\nplphp_call_php_func(plphp_proc_desc *desc, FunctionCallInfo fcinfo TSRMLS_DC)\n{\n\tzval\t *retval;\n\tzval\t *args;\n\tzval\t *argc;\n\tzval\t *funcname;\n\tzval\t **params[2];\n\tchar\t\tcall[64];\n\tHashTable *orig_symbol_table;\n\tHashTable *symbol_table;\n\n\tREPORT_PHP_MEMUSAGE(\"going to build function args\");\n\n\tALLOC_HASHTABLE(symbol_table);\n\tzend_hash_init(symbol_table, 0, NULL, ZVAL_PTR_DTOR, 0);\n\n\t/*\n\t * Build the function arguments. Save a pointer to each new zval in our\n\t * private symbol table, so that we can clean up easily later.\n\t */\n\targs = plphp_func_build_args(desc, fcinfo TSRMLS_CC);\n\tzend_hash_update(symbol_table, \"args\", strlen(\"args\") + 1,\n\t\t\t\t\t (void *) &args, sizeof(zval *), NULL);\n\n\tREPORT_PHP_MEMUSAGE(\"args built. Now the rest ...\");\n\n\tMAKE_STD_ZVAL(argc);\n\tZVAL_LONG(argc, desc->n_total_args);\n\tzend_hash_update(symbol_table, \"argc\", strlen(\"argc\") + 1,\n\t\t\t\t\t (void *) &argc, sizeof(zval *), NULL);\n\n\tparams[0] = &args;\n\tparams[1] = &argc;\n\n\t/* Build the internal function name, and save for later cleaning */\n\tsprintf(call, \"plphp_proc_%u\", fcinfo->flinfo->fn_oid);\n\tMAKE_STD_ZVAL(funcname);\n\tZVAL_STRING(funcname, call, 1);\n\tzend_hash_update(symbol_table, \"funcname\", strlen(\"funcname\") + 1,\n\t\t\t\t\t (void *) &funcname, sizeof(zval *), NULL);\n\n\tREPORT_PHP_MEMUSAGE(\"going to call the function\");\n\n\torig_symbol_table = EG(active_symbol_table);\n\tEG(active_symbol_table) = symbol_table;\n\n\tsaved_symbol_table = EG(active_symbol_table);\n\n\t/* XXX: why no_separation param is 1 is this call ? 
*/\n\tif (call_user_function_ex(CG(function_table), NULL, funcname, &retval,\n\t\t\t\t\t\t\t 2, params, 1, symbol_table TSRMLS_CC) == FAILURE)\n\t\telog(ERROR, \"could not call function \\\"%s\\\"\", call);\n\n\tREPORT_PHP_MEMUSAGE(\"going to free some vars\");\n\n\tsaved_symbol_table = NULL;\n\n\t/* Return to the original symbol table, and clean our private one */\n\tEG(active_symbol_table) = orig_symbol_table;\n\tzend_hash_clean(symbol_table);\n\n\tREPORT_PHP_MEMUSAGE(\"function call done\");\n\n\treturn retval;\n}\n\n/*\n * plphp_call_php_trig\n * \t\tBuild trigger argument array and call the PHP function as a\n * \t\ttrigger.\n *\n * Note we don't need to change the symbol table here like we do in\n * plphp_call_php_func, because we do manual cleaning of each zval used.\n */\nstatic zval *\nplphp_call_php_trig(plphp_proc_desc *desc, FunctionCallInfo fcinfo,\n\t\t\t\t\tzval *trigdata TSRMLS_DC)\n{\n\tzval\t *retval;\n\tzval\t *funcname;\n\tchar\t\tcall[64];\n\tzval\t **params[1];\n\n\tparams[0] = &trigdata;\n\n\t/* Build the internal function name, and save for later cleaning */\n\tsprintf(call, \"plphp_proc_%u_trigger\", fcinfo->flinfo->fn_oid);\n\tMAKE_STD_ZVAL(funcname);\n\tZVAL_STRING(funcname, call, 0);\n\n\t/*\n\t * HACK: mark trigdata as a reference, so it won't be copied in\n\t * call_user_function_ex. This way the user function will be able to \n\t * modify it, in order to change NEW.\n\t */\n\tZ_SET_ISREF_P(trigdata);\n\n\tif (call_user_function_ex(CG(function_table), NULL, funcname, &retval,\n\t\t\t\t\t\t\t 1, params, 1, NULL TSRMLS_CC) == FAILURE)\n\t\telog(ERROR, \"could not call function \\\"%s\\\"\", call);\n\n\tFREE_ZVAL(funcname);\n\n\t/* Return to the original state */\n\tZ_UNSET_ISREF_P(trigdata);\n\n\treturn retval;\n}\n\n/*\n * plphp_error_cb\n *\n * A callback for PHP error handling. This is called when the php_error or\n * zend_error function is invoked in our code. 
Ideally this function should\n * clean up the PHP state after an ERROR, but zend_try blocks do not seem\n * to work as I'd expect. So for now, we degrade the error to WARNING and \n * continue executing in the hope that the system doesn't crash later.\n *\n * Note that we do clean up some PHP state by hand but it doesn't seem to\n * work as expected either.\n */\nvoid\nplphp_error_cb(int type, const char *filename, const uint lineno,\n\t \t\t const char *fmt, va_list args)\n{\n\tchar\tstr[1024];\n\tint\t\televel;\n\n\tvsnprintf(str, 1024, fmt, args);\n\n\t/*\n\t * PHP error classification is a bitmask, so this conversion is a bit\n\t * bogus. However, most calls to php_error() use a single bit.\n\t * Whenever more than one is used, we will default to ERROR, so this is\n\t * safe, if a bit excessive.\n\t *\n\t * XXX -- I wonder whether we should promote the WARNINGs to errors as\n\t * well. PHP has a really stupid way of continuing execution in presence\n\t * of severe problems that I don't see why we should maintain.\n\t */\n\tswitch (type)\n\t{\n\t\tcase E_ERROR:\n\t\tcase E_CORE_ERROR:\n\t\tcase E_COMPILE_ERROR:\n\t\tcase E_USER_ERROR:\n\t\tcase E_PARSE:\n\t\t\televel = ERROR;\n\t\t\tbreak;\n\t\tcase E_WARNING:\n\t\tcase E_CORE_WARNING:\n\t\tcase E_COMPILE_WARNING:\n\t\tcase E_USER_WARNING:\n\t\tcase E_STRICT:\n\t\t\televel = WARNING;\n\t\t\tbreak;\n\t\tcase E_NOTICE:\n\t\tcase E_USER_NOTICE:\n\t\t\televel = NOTICE;\n\t\t\tbreak;\n\t\tdefault:\n\t\t\televel = ERROR;\n\t\t\tbreak;\n\t}\n\n\tREPORT_PHP_MEMUSAGE(\"reporting error\");\n\n\t/*\n\t * If this is a severe problem, we need to make PHP aware of it, so first\n\t * save the error message and then bail out of the PHP block. 
With luck,\n\t * this will be trapped by a zend_try/zend_catch block outwards in PL/php\n\t * code, which would translate it to a Postgres elog(ERROR), leaving\n\t * everything in a consistent state.\n\t *\n\t * For this to work, there must be a try/catch block covering every place\n\t * where PHP may raise an error!\n\t */\n\tif (elevel >= ERROR)\n\t{\n\t\tif (lineno != 0)\n\t\t{\n\t\t\tchar\tmsgline[1024];\n\t\t\tsnprintf(msgline, sizeof(msgline), \"%s at line %d\", str, lineno);\n\t\t\terror_msg = pstrdup(msgline);\n\t\t}\n\t\telse\n\t\t\terror_msg = pstrdup(str);\n\n\t\tzend_bailout();\n\t}\n\n\tereport(elevel,\n\t\t\t(errmsg(\"plphp: %s\", str)));\n}\n\n/* Check if the name can be a valid PHP variable name */\nstatic bool \nis_valid_php_identifier(char *name)\n{\n\tint \tlen,\n\t\t\ti;\n\t\n\tAssert(name);\n\n\tlen = strlen(name);\n\n\t/* Should start from the letter */\n\tif (!isalpha(name[0]))\n\t\treturn false;\n\tfor (i = 1; i < len; i++)\n\t{\n\t\t/* Only letters, digits and underscores are allowed */\n\t\tif (!isalpha(name[i]) && !isdigit(name[i]) && name[i] != '_')\n\t\t\treturn false;\n\t}\n\treturn true;\n}\n\n/*\n * vim:ts=4:sw=4:cino=(0\n */\n", "plphp_io.c": "/**********************************************************************\n * plphp_io.c\n *\n * Support functions for PL/php -- mainly functions to convert stuff\n * from the PHP representation to PostgreSQL representation and vice\n * versa, either text or binary representations.\n *\n * $Id$\n *\n **********************************************************************/\n\n#include \"postgres.h\"\n#include \"plphp_io.h\"\n\n#include \"catalog/pg_type.h\"\n#include \"executor/spi.h\"\n#include \"funcapi.h\"\n#include \"lib/stringinfo.h\"\n#include \"utils/lsyscache.h\"\n#include \"utils/rel.h\"\n#include \"utils/syscache.h\"\n#include \"utils/memutils.h\"\n\n/*\n * plphp_zval_from_tuple\n *\t\t Build a PHP hash from a tuple.\n */\nzval *\nplphp_zval_from_tuple(HeapTuple tuple, TupleDesc 
tupdesc)\n{\n\tint\t\t\ti;\n\tchar\t *attname = NULL;\n\tzval\t *array;\n\n\tMAKE_STD_ZVAL(array);\n\tarray_init(array);\n\n\tfor (i = 0; i < tupdesc->natts; i++)\n\t{\n\t\tchar *attdata;\n\n\t\t/* Get the attribute name */\n\t\tattname = tupdesc->attrs[i]->attname.data;\n\n\t\t/* and get its value */\n\t\tif ((attdata = SPI_getvalue(tuple, tupdesc, i + 1)) != NULL)\n\t\t{\n\t\t\t/* \"true\" means strdup the string */\n\t\t\tadd_assoc_string(array, attname, attdata, true);\n\t\t\tpfree(attdata);\n\t\t}\n\t\telse\n\t\t\tadd_assoc_null(array, attname);\n\t}\n\treturn array;\n}\n\n/*\n * plphp_htup_from_zval\n * \t\tBuild a HeapTuple from a zval (which must be an array) and a TupleDesc.\n *\n * The return HeapTuple is allocated in the current memory context and must\n * be freed by the caller.\n *\n * If zval doesn't contain any of the element names from the TupleDesc,\n * build a tuple from the first N elements. This allows us to accept\n * arrays in form array(1,2,3) as the result of functions with OUT arguments.\n * XXX -- possible optimization: keep the memory context created and only\n * reset it between calls.\n */\nHeapTuple\nplphp_htup_from_zval(zval *val, TupleDesc tupdesc)\n{\n\tMemoryContext\toldcxt;\n\tMemoryContext\ttmpcxt;\n\tHeapTuple\t\tret;\n\tAttInMetadata *attinmeta;\n\tHashPosition\tpos;\n\tzval\t\t **element;\n\tchar\t\t **values;\n\tint\t\t\t\ti;\n\tbool\t\t\tallempty = true;\n\n\ttmpcxt = AllocSetContextCreate(TopTransactionContext,\n\t\t\t\t\t\t\t\t \"htup_from_zval cxt\",\n\t\t\t\t\t\t\t\t ALLOCSET_DEFAULT_MINSIZE,\n\t\t\t\t\t\t\t\t ALLOCSET_DEFAULT_INITSIZE,\n\t\t\t\t\t\t\t\t ALLOCSET_DEFAULT_MAXSIZE);\n\toldcxt = MemoryContextSwitchTo(tmpcxt);\n\n\tvalues = (char **) palloc(tupdesc->natts * sizeof(char *));\n\n\tfor (i = 0; i < tupdesc->natts; i++)\n\t{\n\t\tchar *key = SPI_fname(tupdesc, i + 1);\n\t\tzval *scalarval = plphp_array_get_elem(val, key);\n\n\t\tvalues[i] = plphp_zval_get_cstring(scalarval, true, true);\n\t\t/* \n\t\t * Reset the 
flag is even one of the keys actually exists,\n\t\t * even if it is NULL.\n\t\t */\n\t\tif (scalarval != NULL)\n\t\t\tallempty = false;\n\t}\n\t/* None of the names from the tuple exists,\n\t * try to get 1st N array elements and assign them to the tuple\n\t */\n\tif (allempty)\n\t\tfor (i = 0, \n\t\t\t zend_hash_internal_pointer_reset_ex(Z_ARRVAL_P(val), &pos);\n\t\t\t (zend_hash_get_current_data_ex(Z_ARRVAL_P(val), \n\t\t\t\t\t\t\t\t\t\t (void **) &element,\n\t\t\t\t\t\t\t\t\t\t\t&pos) == SUCCESS) && \n\t\t\t(i < tupdesc->natts);\n\t\t\tzend_hash_move_forward_ex(Z_ARRVAL_P(val), &pos), i++)\n\t\t\tvalues[i] = plphp_zval_get_cstring(element[0], true, true);\n\n\tattinmeta = TupleDescGetAttInMetadata(tupdesc);\n\n\tMemoryContextSwitchTo(oldcxt);\n\tret = BuildTupleFromCStrings(attinmeta, values);\n\n\tMemoryContextDelete(tmpcxt);\n\n\treturn ret;\n}\n\n\n/* plphp_srf_htup_from_zval\n * \t\tBuild a tuple from a zval and a TupleDesc, for a SRF.\n *\n * Like above, but we don't use the names of the array attributes;\n * rather we build the tuple in order. Also, we get a MemoryContext\n * from the caller and just clean it at return, rather than building it each\n * time.\n */\nHeapTuple\nplphp_srf_htup_from_zval(zval *val, AttInMetadata *attinmeta,\n\t\t\t\t\t\t MemoryContext cxt)\n{\n\tMemoryContext\toldcxt;\n\tHeapTuple\t\tret;\n\tHashPosition\tpos;\n\tchar\t\t **values;\n\tzval\t\t **element;\n\tint\t\t\t\ti = 0;\n\n\toldcxt = MemoryContextSwitchTo(cxt);\n\n\t/*\n\t * Use palloc0 to initialize values to NULL, just in case the user does\n\t * not pass all needed attributes\n\t */\n\tvalues = (char **) palloc0(attinmeta->tupdesc->natts * sizeof(char *));\n\n\t/*\n\t * If the input zval is an array, build a tuple using each element as an\n\t * attribute. 
Exception: if the return tuple has a single element and\n\t * it's an array type, use the whole array as a single value.\n\t *\n\t * If the input zval is a scalar, use it as an element directly.\n\t */\n\tif (Z_TYPE_P(val) == IS_ARRAY)\n\t{\n\t\tif (attinmeta->tupdesc->natts == 1)\n\t\t{\n\t\t\t/* Is it an array? */\n\t\t\tif (attinmeta->tupdesc->attrs[0]->attndims != 0 ||\n\t\t\t\t!OidIsValid(get_element_type(attinmeta->tupdesc->attrs[0]->atttypid)))\n\t\t\t{\n\t\t\t\tzend_hash_internal_pointer_reset_ex(Z_ARRVAL_P(val), &pos);\n\t\t\t\tzend_hash_get_current_data_ex(Z_ARRVAL_P(val),\n\t\t\t\t\t\t\t\t\t\t\t (void **) &element,\n\t\t\t\t\t\t\t\t\t\t\t &pos);\n\t\t\t\tvalues[0] = plphp_zval_get_cstring(element[0], true, true);\n\t\t\t}\n\t\t\telse\n\t\t\t\tvalues[0] = plphp_zval_get_cstring(val, true, true);\n\t\t}\n\t\telse\n\t\t{\n\t\t\t/*\n\t\t\t * Ok, it's an array and the return tuple has more than one\n\t\t\t * attribute, so scan each array element.\n\t\t\t */\n\t\t\tfor (zend_hash_internal_pointer_reset_ex(Z_ARRVAL_P(val), &pos);\n\t\t\t\t zend_hash_get_current_data_ex(Z_ARRVAL_P(val),\n\t\t\t\t\t\t\t\t\t\t\t (void **) &element,\n\t\t\t\t\t\t\t\t\t\t\t &pos) == SUCCESS;\n\t\t\t\t zend_hash_move_forward_ex(Z_ARRVAL_P(val), &pos))\n\t\t\t{\n\t\t\t\t/* avoid overrunning the palloc'ed chunk */\n\t\t\t\tif (i >= attinmeta->tupdesc->natts)\n\t\t\t\t{\n\t\t\t\t\telog(WARNING, \"more elements in array than attributes in return type\");\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\n\t\t\t\tvalues[i++] = plphp_zval_get_cstring(element[0], true, true);\n\t\t\t}\n\t\t}\n\t}\n\telse\n\t{\n\t\t/* The passed zval is not an array -- use as the only attribute */\n\t\tif (attinmeta->tupdesc->natts != 1)\n\t\t\tereport(ERROR,\n\t\t\t\t\t(errmsg(\"returned array does not correspond to \"\n\t\t\t\t\t\t\t\"declared return value\")));\n\n\t\tvalues[0] = plphp_zval_get_cstring(val, true, true);\n\t}\n\n\tMemoryContextSwitchTo(oldcxt);\n\n\tret = BuildTupleFromCStrings(attinmeta, 
values);\n\n\tMemoryContextReset(cxt);\n\n\treturn ret;\n}\n\n/*\n * plphp_convert_to_pg_array\n * \t\tConvert a zval into a Postgres text array representation.\n *\n * The return value is palloc'ed in the current memory context and\n * must be freed by the caller.\n */\nchar *\nplphp_convert_to_pg_array(zval *array)\n{\n\tint\t\t\tarr_size;\n\tzval\t **element;\n\tint\t\t\ti = 0;\n\tHashPosition \tpos;\n\tStringInfoData\tstr;\n\t\n\tinitStringInfo(&str);\n\n\tarr_size = zend_hash_num_elements(Z_ARRVAL_P(array));\n\n\tappendStringInfoChar(&str, '{');\n\tif (Z_TYPE_P(array) == IS_ARRAY)\n\t{\n\t\tfor (zend_hash_internal_pointer_reset_ex(Z_ARRVAL_P(array), &pos);\n\t\t\t zend_hash_get_current_data_ex(Z_ARRVAL_P(array),\n\t\t\t\t\t\t\t\t\t\t (void **) &element,\n\t\t\t\t\t\t\t\t\t\t &pos) == SUCCESS;\n\t\t\t zend_hash_move_forward_ex(Z_ARRVAL_P(array), &pos))\n\t\t{\n\t\t\tchar *tmp;\n\n\t\t\tswitch (Z_TYPE_P(element[0]))\n\t\t\t{\n\t\t\t\tcase IS_LONG:\n\t\t\t\t\tappendStringInfo(&str, \"%li\", element[0]->value.lval);\n\t\t\t\t\tbreak;\n\t\t\t\tcase IS_DOUBLE:\n\t\t\t\t\tappendStringInfo(&str, \"%f\", element[0]->value.dval);\n\t\t\t\t\tbreak;\n\t\t\t\tcase IS_STRING:\n\t\t\t\t\tappendStringInfo(&str, \"\\\"%s\\\"\", element[0]->value.str.val);\n\t\t\t\t\tbreak;\n\t\t\t\tcase IS_ARRAY:\n\t\t\t\t\ttmp = plphp_convert_to_pg_array(element[0]);\n\t\t\t\t\tappendStringInfo(&str, \"%s\", tmp);\n\t\t\t\t\tpfree(tmp);\n\t\t\t\t\tbreak;\n\t\t\t\tdefault:\n\t\t\t\t\telog(ERROR, \"unrecognized element type %d\",\n\t\t\t\t\t\t Z_TYPE_P(element[0]));\n\t\t\t}\n\n\t\t\tif (i != arr_size - 1)\n\t\t\t\tappendStringInfoChar(&str, ',');\n\t\t\ti++;\n\t\t}\n\t}\n\n\tappendStringInfoChar(&str, '}');\n\n\treturn str.data;\n}\n\n/*\n * plphp_convert_from_pg_array\n * \t\tConvert a Postgres text array representation to a PHP array\n * \t\t(zval type thing).\n *\n * FIXME -- does not work if there are embedded {'s in the input value.\n *\n * FIXME -- does not correctly quote/dequote the 
values\n */\nzval *\nplphp_convert_from_pg_array(char *input TSRMLS_DC)\n{\n\tzval\t *retval = NULL;\n\tint\t\t\ti;\n\tStringInfoData str;\n\t\n\tinitStringInfo(&str);\n\n\tMAKE_STD_ZVAL(retval);\n\tarray_init(retval);\n\n\tfor (i = 0; i < strlen(input); i++)\n\t{\n\t\tif (input[i] == '{')\n\t\t\tappendStringInfoString(&str, \"array(\");\n\t\telse if (input[i] == '}')\n\t\t\tappendStringInfoChar(&str, ')');\n\t\telse\n\t\t\tappendStringInfoChar(&str, input[i]);\n\t}\n\tappendStringInfoChar(&str, ';');\n\n\tif (zend_eval_string(str.data, retval,\n\t\t\t\t\t\t \"plphp array input parameter\" TSRMLS_CC) == FAILURE)\n\t\telog(ERROR, \"plphp: convert to internal representation failure\");\n\n\tpfree(str.data);\n\n\treturn retval;\n}\n\n/*\n * plphp_array_get_elem\n * \t\tReturn a pointer to the array element with the given key\n */\nzval *\nplphp_array_get_elem(zval *array, char *key)\n{\n\tzval\t **element;\n\n\tif (!array)\n\t\telog(ERROR, \"passed zval is not a valid pointer\");\n\tif (Z_TYPE_P(array) != IS_ARRAY)\n\t\telog(ERROR, \"passed zval is not an array\");\n\n\tif (zend_symtable_find(array->value.ht,\n\t\t\t\t\t \t key,\n\t\t\t\t\t strlen(key) + 1,\n\t\t\t\t\t (void **) &element) != SUCCESS)\n\t\treturn NULL;\n\n\treturn element[0];\n}\n\n/*\n * zval_get_cstring\n *\t\tGet a C-string representation of a zval.\n *\n * All return values, except those that are NULL, are palloc'ed in the current\n * memory context and must be freed by the caller.\n *\n * If the do_array parameter is false, then array values will not be converted\n * and an error will be raised instead.\n *\n * If the null_ok parameter is true, we will return NULL for a NULL zval.\n * Otherwise we raise an error.\n */\nchar *\nplphp_zval_get_cstring(zval *val, bool do_array, bool null_ok)\n{\n\tchar *ret;\n\n\tif (!val)\n\t{\n\t\tif (null_ok)\n\t\t\treturn NULL;\n\t\telse\n\t\t\telog(ERROR, \"invalid zval pointer\");\n\t}\n\n\tswitch (Z_TYPE_P(val))\n\t{\n\t\tcase IS_NULL:\n\t\t\treturn 
NULL;\n\t\tcase IS_LONG:\n\t\t\tret = palloc(64);\n\t\t\tsnprintf(ret, 64, \"%ld\", Z_LVAL_P(val));\n\t\t\tbreak;\n\t\tcase IS_DOUBLE:\n\t\t\tret = palloc(64);\n\t\t\tsnprintf(ret, 64, \"%f\", Z_DVAL_P(val));\n\t\t\tbreak;\n\t\tcase IS_BOOL:\n\t\t\tret = palloc(8);\n\t\t\tsnprintf(ret, 8, \"%s\", Z_BVAL_P(val) ? \"true\": \"false\");\n\t\t\tbreak;\n\t\tcase IS_STRING:\n\t\t\tret = palloc(Z_STRLEN_P(val) + 1);\n\t\t\tsnprintf(ret, Z_STRLEN_P(val) + 1, \"%s\", \n\t\t\t\t\t Z_STRVAL_P(val));\n\t\t\tbreak;\n\t\tcase IS_ARRAY:\n\t\t\tif (!do_array)\n\t\t\t\telog(ERROR, \"can't stringize array value\");\n\t\t\tret = plphp_convert_to_pg_array(val);\n\t\t\tbreak;\n\t\tdefault:\n\t\t\t/* keep compiler quiet */\n\t\t\tret = NULL;\n\t\t\telog(ERROR, \"can't stringize value of type %d\", val->type);\n\t}\n\n\treturn ret;\n}\n\n/*\n * plphp_build_tuple_argument\n *\n * Build a PHP array from all attributes of a given tuple\n */\nzval *\nplphp_build_tuple_argument(HeapTuple tuple, TupleDesc tupdesc)\n{\n\tint\t\t\ti;\n\tzval\t *output;\n\tDatum\t\tattr;\n\tbool\t\tisnull;\n\tchar\t *attname;\n\tchar\t *outputstr;\n\tHeapTuple\ttypeTup;\n\tOid\t\t\ttypoutput;\n\tOid\t\t\ttypioparam;\n\n\tMAKE_STD_ZVAL(output);\n\tarray_init(output);\n\n\tfor (i = 0; i < tupdesc->natts; i++)\n\t{\n\t\t/* Ignore dropped attributes */\n\t\tif (tupdesc->attrs[i]->attisdropped)\n\t\t\tcontinue;\n\n\t\t/* Get the attribute name */\n\t\tattname = tupdesc->attrs[i]->attname.data;\n\n\t\t/* Get the attribute value */\n\t\tattr = heap_getattr(tuple, i + 1, tupdesc, &isnull);\n\n\t\t/* If it is null, set it to undef in the hash. 
*/\n\t\tif (isnull)\n\t\t{\n\t\t\tadd_next_index_unset(output);\n\t\t\tcontinue;\n\t\t}\n\n\t\t/*\n\t\t * Lookup the attribute type in the syscache for the output function\n\t\t */\n\t\ttypeTup = SearchSysCache(TYPEOID,\n\t\t\t\t\t\t\t\t ObjectIdGetDatum(tupdesc->attrs[i]->atttypid),\n\t\t\t\t\t\t\t\t 0, 0, 0);\n\t\tif (!HeapTupleIsValid(typeTup))\n\t\t{\n\t\t\telog(ERROR, \"cache lookup failed for type %u\",\n\t\t\t\t tupdesc->attrs[i]->atttypid);\n\t\t}\n\n\t\ttypoutput = ((Form_pg_type) GETSTRUCT(typeTup))->typoutput;\n\t\ttypioparam = getTypeIOParam(typeTup);\n\t\tReleaseSysCache(typeTup);\n\n\t\t/* Append the attribute name and the value to the list. */\n\t\toutputstr =\n\t\t\tDatumGetCString(OidFunctionCall3(typoutput, attr,\n\t\t\t\t\t\t\t\t\t\t\t ObjectIdGetDatum(typioparam),\n\t\t\t\t\t\t\t\t\t\t\t Int32GetDatum(tupdesc->attrs[i]->atttypmod)));\n\t\tadd_assoc_string(output, attname, outputstr, 1);\n\t\tpfree(outputstr);\n\t}\n\n\treturn output;\n}\n\n/*\n * plphp_modify_tuple\n * \t\tReturn the modified NEW tuple, for use as return value in a BEFORE\n * \t\ttrigger. outdata must point to the $_TD variable from the PHP\n * \t\tfunction.\n *\n * The tuple will be allocated in the current memory context and must be freed\n * by the caller.\n *\n * XXX Possible optimization: make this a global context that is not deleted,\n * but only reset each time this function is called. 
(Think about triggers\n * calling other triggers though).\n */\nHeapTuple\nplphp_modify_tuple(zval *outdata, TriggerData *tdata)\n{\n\tTupleDesc\ttupdesc;\n\tHeapTuple\trettuple;\n\tzval\t *newtup;\n\tzval\t **element;\n\tchar\t **vals;\n\tint\t\t\ti;\n\tAttInMetadata *attinmeta;\n\tMemoryContext tmpcxt,\n\t\t\t\t oldcxt;\n\n\ttmpcxt = AllocSetContextCreate(CurTransactionContext,\n\t\t\t\t\t\t\t\t \"PL/php NEW context\",\n\t\t\t\t\t\t\t\t ALLOCSET_DEFAULT_MINSIZE,\n\t\t\t\t\t\t\t\t ALLOCSET_DEFAULT_INITSIZE,\n\t\t\t\t\t\t\t\t ALLOCSET_DEFAULT_MAXSIZE);\n\n\toldcxt = MemoryContextSwitchTo(tmpcxt);\n\n\t/* Fetch \"new\" from $_TD */\n\tif (zend_hash_find(outdata->value.ht,\n\t\t\t\t\t \"new\", strlen(\"new\") + 1,\n\t\t\t\t\t (void **) &element) != SUCCESS)\n\t\telog(ERROR, \"$_TD['new'] not found\");\n\n\tif (Z_TYPE_P(element[0]) != IS_ARRAY)\n\t\telog(ERROR, \"$_TD['new'] must be an array\");\n\tnewtup = element[0];\n\n\t/* Fetch the tupledesc and metadata */\n\ttupdesc = tdata->tg_relation->rd_att;\n\tattinmeta = TupleDescGetAttInMetadata(tupdesc);\n\n\ti = zend_hash_num_elements(Z_ARRVAL_P(newtup));\n\n\tif (tupdesc->natts > i)\n\t\tereport(ERROR,\n\t\t\t\t(errmsg(\"insufficient number of keys in $_TD['new']\"),\n\t\t\t\t errdetail(\"At least %d expected, %d found.\",\n\t\t\t\t\t\t tupdesc->natts, i)));\n\n\tvals = (char **) palloc(tupdesc->natts * sizeof(char *));\n\n\t/*\n\t * For each attribute in the tupledesc, get its value from newtup and put\n\t * it in an array of cstrings.\n\t */\n\tfor (i = 0; i < tupdesc->natts; i++)\n\t{\n\t\tzval **element;\n\t\tchar *attname = NameStr(tupdesc->attrs[i]->attname);\n\n\t\t/* Fetch the attribute value from the zval */\n\t\tif (zend_symtable_find(newtup->value.ht, attname, strlen(attname) + 1,\n\t\t\t\t\t\t \t (void **) &element) != SUCCESS)\n\t\t\telog(ERROR, \"$_TD['new'] does not contain attribute \\\"%s\\\"\",\n\t\t\t\t attname);\n\n\t\tvals[i] = plphp_zval_get_cstring(element[0], true, true);\n\t}\n\n\t/* Return to 
the original context so that the new tuple will survive */\n\tMemoryContextSwitchTo(oldcxt);\n\n\t/* Build the tuple */\n\trettuple = BuildTupleFromCStrings(attinmeta, vals);\n\n\t/* Free the memory used */\n\tMemoryContextDelete(tmpcxt);\n\n\treturn rettuple;\n}\n\n/*\n * vim:ts=4:sw=4:cino=(0\n */\n", "plphp_spi.c": "/**********************************************************************\n * plphp_spi.c - SPI-related functions for PL/php.\n *\n * This software is copyright (c) Command Prompt Inc.\n *\n * The author hereby grants permission to use, copy, modify,\n * distribute, and license this software and its documentation for any\n * purpose, provided that existing copyright notices are retained in\n * all copies and that this notice is included verbatim in any\n * distributions. No written agreement, license, or royalty fee is\n * required for any of the authorized uses. Modifications to this\n * software may be copyrighted by their author and need not follow the\n * licensing terms described here, provided that the new terms are\n * clearly indicated on the first page of each file where they apply.\n *\n * IN NO EVENT SHALL THE AUTHOR OR DISTRIBUTORS BE LIABLE TO ANY PARTY\n * FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES\n * ARISING OUT OF THE USE OF THIS SOFTWARE, ITS DOCUMENTATION, OR ANY\n * DERIVATIVES THEREOF, EVEN IF THE AUTHOR HAVE BEEN ADVISED OF THE\n * POSSIBILITY OF SUCH DAMAGE.\n *\n * THE AUTHOR AND DISTRIBUTORS SPECIFICALLY DISCLAIM ANY WARRANTIES,\n * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND\n * NON-INFRINGEMENT. 
THIS SOFTWARE IS PROVIDED ON AN \"AS IS\" BASIS,\n * AND THE AUTHOR AND DISTRIBUTORS HAVE NO OBLIGATION TO PROVIDE\n * MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.\n *\n * IDENTIFICATION\n *\t\t$Id$\n *********************************************************************\n */\n\n#include \"postgres.h\"\n#include \"plphp_spi.h\"\n#include \"plphp_io.h\"\n\n/* PHP stuff */\n#include \"php.h\"\n\n/* PostgreSQL stuff */\n#include \"access/xact.h\"\n#include \"miscadmin.h\"\n\n#undef DEBUG_PLPHP_MEMORY\n\n#ifdef DEBUG_PLPHP_MEMORY\n#define REPORT_PHP_MEMUSAGE(where) \\\n\telog(NOTICE, \"PHP mem usage: \u00ab%s\u00bb: %u\", where, AG(allocated_memory));\n#else\n#define REPORT_PHP_MEMUSAGE(a) \n#endif\n\n/* resource type Id for SPIresult */\nint SPIres_rtype;\n\n/* SPI function table */\nzend_function_entry spi_functions[] =\n{\n\tZEND_FE(spi_exec, NULL)\n\tZEND_FE(spi_fetch_row, NULL)\n\tZEND_FE(spi_processed, NULL)\n\tZEND_FE(spi_status, NULL)\n\tZEND_FE(spi_rewind, NULL)\n\tZEND_FE(pg_raise, NULL)\n\tZEND_FE(return_next, NULL)\n\t{NULL, NULL, NULL}\n};\n\n/* SRF support: */\nFunctionCallInfo current_fcinfo = NULL;\nTupleDesc current_tupledesc = NULL;\nAttInMetadata *current_attinmeta = NULL;\nMemoryContext current_memcxt = NULL;\nTuplestorestate *current_tuplestore = NULL;\n\n\n/* A symbol table to save for return_next for the RETURNS TABLE case */\nHashTable *saved_symbol_table;\n\nstatic zval *get_table_arguments(AttInMetadata *attinmeta);\n\n/*\n * spi_exec\n * \t\tPL/php equivalent to SPI_exec().\n *\n * This function creates and return a PHP resource which describes the result\n * of a user-specified query. If the query returns tuples, it's possible to\n * retrieve them by using spi_fetch_row.\n *\n * Receives one or two arguments. The mandatory first argument is the query\n * text. 
The optional second argument is the tuple limit.\n *\n * Note that just like PL/Perl, we start a subtransaction before invoking the\n * SPI call, and automatically roll it back if the call fails.\n */\nZEND_FUNCTION(spi_exec)\n{\n\tchar\t *query;\n\tint\t\t\tquery_len;\n\tlong\t\tstatus;\n\tlong\t\tlimit;\n\tphp_SPIresult *SPIres;\n\tint\t\t\tspi_id;\n\tMemoryContext oldcontext = CurrentMemoryContext;\n\tResourceOwner oldowner = CurrentResourceOwner;\n\n\tREPORT_PHP_MEMUSAGE(\"spi_exec called\");\n\n\tif ((ZEND_NUM_ARGS() > 2) || (ZEND_NUM_ARGS() < 1))\n\t\tWRONG_PARAM_COUNT;\n\n\t/* Parse arguments */\n\tif (ZEND_NUM_ARGS() == 2)\n\t{\n\t\tif (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, \"sl\",\n\t\t\t\t\t\t\t\t &query, &query_len, &limit) == FAILURE)\n\t\t{\n\t\t\tzend_error(E_WARNING, \"Can not parse parameters in %s\",\n\t\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\t\tRETURN_FALSE;\n\t\t}\n\t}\n\telse if (ZEND_NUM_ARGS() == 1)\n\t{\n\t\tif (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, \"s\",\n\t\t\t\t\t\t\t\t &query, &query_len) == FAILURE)\n\t\t{\n\t\t\tzend_error(E_WARNING, \"Can not parse parameters in %s\",\n\t\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\t\tRETURN_FALSE;\n\t\t}\n\t\tlimit = 0;\n\t}\n\telse\n\t{\n\t\tzend_error(E_WARNING, \"Incorrect number of parameters to %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\tRETURN_FALSE;\n\t}\n\n\tBeginInternalSubTransaction(NULL);\n\tMemoryContextSwitchTo(oldcontext);\n\n\t/* Call SPI */\n\tPG_TRY();\n\t{\n\t\tstatus = SPI_exec(query, limit);\n\n\t\tReleaseCurrentSubTransaction();\n\t\tMemoryContextSwitchTo(oldcontext);\n\t\tCurrentResourceOwner = oldowner;\n\n\t\t/*\n\t\t * AtEOSubXact_SPI() should not have popped any SPI context, but just\n\t\t * in case it did, make sure we remain connected.\n\t\t */\n\t\tSPI_restore_connection();\n\t}\n\tPG_CATCH();\n\t{\n\t\tErrorData\t*edata;\n\n\t\t/* Save error info */\n\t\tMemoryContextSwitchTo(oldcontext);\n\t\tedata = 
CopyErrorData();\n\t\tFlushErrorState();\n\n\t\t/* Abort the inner trasaction */\n\t\tRollbackAndReleaseCurrentSubTransaction();\n\t\tMemoryContextSwitchTo(oldcontext);\n\t\tCurrentResourceOwner = oldowner;\n\n\t\t/*\n\t\t * If AtEOSubXact_SPI() popped any SPI context of the subxact, it will\n\t\t * have left us in a disconnected state. We need this hack to return\n\t\t * to connected state.\n\t\t */\n\t\tSPI_restore_connection();\n\n\t\t/* bail PHP out */\n\t\tzend_error(E_ERROR, \"%s\", strdup(edata->message));\n\n\t\t/* Can't get here, but keep compiler quiet */\n\t\treturn;\n\t}\n\tPG_END_TRY();\n\n\t/* This malloc'ed chunk is freed in php_SPIresult_destroy */\n\tSPIres = (php_SPIresult *) malloc(sizeof(php_SPIresult));\n\tif (!SPIres)\n\t\tereport(ERROR,\n\t\t\t\t(errcode(ERRCODE_OUT_OF_MEMORY),\n\t\t\t\t errmsg(\"out of memory\")));\n\n\t/* Prepare the return resource */\n\tSPIres->SPI_processed = SPI_processed;\n\tif (status == SPI_OK_SELECT)\n\t\tSPIres->SPI_tuptable = SPI_tuptable;\n\telse\n\t\tSPIres->SPI_tuptable = NULL;\n\tSPIres->current_row = 0;\n\tSPIres->status = status;\n\n\tREPORT_PHP_MEMUSAGE(\"spi_exec: creating resource\");\n\n\t/* Register the resource to PHP so it will be able to free it */\n\tspi_id = ZEND_REGISTER_RESOURCE(return_value, (void *) SPIres,\n\t\t\t\t\t \t\t\t\tSPIres_rtype);\n\n\tREPORT_PHP_MEMUSAGE(\"spi_exec: returning\");\n\n\tRETURN_RESOURCE(spi_id);\n}\n\n/*\n * spi_fetch_row\n * \t\tGrab a row from a SPI result (from spi_exec).\n *\n * This function receives a resource Id and returns a PHP hash representing the\n * next tuple in the result, or false if no tuples remain.\n *\n * XXX Apparently this is leaking memory. 
How do we tell PHP to free the tuple\n * once the user is done with it?\n */\nZEND_FUNCTION(spi_fetch_row)\n{\n\tzval\t *row = NULL;\n\tzval\t **z_spi = NULL;\n\tphp_SPIresult\t*SPIres;\n\n\tREPORT_PHP_MEMUSAGE(\"spi_fetch_row: called\");\n\n\tif (ZEND_NUM_ARGS() != 1)\n\t\tWRONG_PARAM_COUNT;\n\n\tif (zend_get_parameters_ex(1, &z_spi) == FAILURE)\n\t{\n\t\tzend_error(E_WARNING, \"Can not parse parameters in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\tRETURN_FALSE;\n\t}\n\tif (z_spi == NULL)\n\t{\n\t\tzend_error(E_WARNING, \"Could not get SPI resource in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\tRETURN_FALSE;\n\t}\n\n\tZEND_FETCH_RESOURCE(SPIres, php_SPIresult *, z_spi, -1, \"SPI result\",\n\t\t\t\t\t\tSPIres_rtype);\n\n\tif (SPIres->status != SPI_OK_SELECT)\n\t{\n\t\tzend_error(E_WARNING, \"SPI status is not good\");\n\t\tRETURN_FALSE;\n\t}\n\n\tif (SPIres->current_row < SPIres->SPI_processed)\n\t{\n\t\trow = plphp_zval_from_tuple(SPIres->SPI_tuptable->vals[SPIres->current_row],\n\t\t\t \t\t\t\t\t\tSPIres->SPI_tuptable->tupdesc);\n\t\tSPIres->current_row++;\n\n\t\t*return_value = *row;\n\n\t\tzval_copy_ctor(return_value);\n\t\tzval_dtor(row);\n\t\tFREE_ZVAL(row);\n\n\t}\n\telse\n\t\tRETURN_FALSE;\n\n\tREPORT_PHP_MEMUSAGE(\"spi_fetch_row: finish\");\n}\n\n/*\n * spi_processed\n * \t\tReturn the number of tuples returned in a spi_exec call.\n */\nZEND_FUNCTION(spi_processed)\n{\n\tzval\t **z_spi = NULL;\n\tphp_SPIresult\t*SPIres;\n\n\tREPORT_PHP_MEMUSAGE(\"spi_processed: start\");\n\n\tif (ZEND_NUM_ARGS() != 1)\n\t\tWRONG_PARAM_COUNT;\n\n\tif (zend_get_parameters_ex(1, &z_spi) == FAILURE)\n\t{\n\t\tzend_error(E_WARNING, \"Cannot parse parameters in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\tRETURN_FALSE;\n\t}\n\tif (z_spi == NULL)\n\t{\n\t\tzend_error(E_WARNING, \"Could not get SPI resource in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\tRETURN_FALSE;\n\t}\n\n\tZEND_FETCH_RESOURCE(SPIres, php_SPIresult *, 
z_spi, -1, \"SPI result\",\n\t\t\t\t\t\tSPIres_rtype);\n\n\tREPORT_PHP_MEMUSAGE(\"spi_processed: finish\");\n\n\tRETURN_LONG(SPIres->SPI_processed);\n}\n\n/*\n * spi_status\n * \t\tReturn the status returned by a previous spi_exec call, as a string.\n */\nZEND_FUNCTION(spi_status)\n{\n\tzval\t **z_spi = NULL;\n\tphp_SPIresult\t*SPIres;\n\n\tREPORT_PHP_MEMUSAGE(\"spi_status: start\");\n\n\tif (ZEND_NUM_ARGS() != 1)\n\t\tWRONG_PARAM_COUNT;\n\n\tif (zend_get_parameters_ex(1, &z_spi) == FAILURE)\n\t{\n\t\tzend_error(E_WARNING, \"Cannot parse parameters in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\tRETURN_FALSE;\n\t}\n\tif (z_spi == NULL)\n\t{\n\t\tzend_error(E_WARNING, \"Could not get SPI resource in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\tRETURN_FALSE;\n\t}\n\n\tZEND_FETCH_RESOURCE(SPIres, php_SPIresult *, z_spi, -1, \"SPI result\",\n\t\t\t\t\t\tSPIres_rtype);\n\n\tREPORT_PHP_MEMUSAGE(\"spi_status: finish\");\n\n\t/*\n\t * XXX The cast is wrong, but we use it to prevent a compiler warning.\n\t * Note that the second parameter to RETURN_STRING is \"duplicate\", so\n\t * we are returning a copy of the string anyway.\n\t */\n\tRETURN_STRING((char *) SPI_result_code_string(SPIres->status), true);\n}\n\n/*\n * spi_rewind\n * \t\tResets the internal counter for spi_fetch_row, so the next\n * \t\tspi_fetch_row call will start fetching from the beginning.\n */\nZEND_FUNCTION(spi_rewind)\n{\n\tzval\t **z_spi = NULL;\n\tphp_SPIresult\t*SPIres;\n\n\tif (ZEND_NUM_ARGS() != 1)\n\t\tWRONG_PARAM_COUNT;\n\n\tif (zend_get_parameters_ex(1, &z_spi) == FAILURE)\n\t{\n\t\tzend_error(E_WARNING, \"Cannot parse parameters in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\tRETURN_FALSE;\n\t}\n\tif (z_spi == NULL)\n\t{\n\t\tzend_error(E_WARNING, \"Could not get SPI resource in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\tRETURN_FALSE;\n\t}\n\n\tZEND_FETCH_RESOURCE(SPIres, php_SPIresult *, z_spi, -1, \"SPI 
result\",\n\t\t\t\t\t\tSPIres_rtype);\n\n\tSPIres->current_row = 0;\n\n\tRETURN_NULL();\n}\n/*\n * pg_raise\n * User-callable function for sending messages to the Postgres log.\n */\nZEND_FUNCTION(pg_raise)\n{\n\tchar *level = NULL,\n\t\t\t *message = NULL;\n\tint level_len,\n\t\t\t\tmessage_len,\n\t\t\t\televel = 0;\n\n\tif (ZEND_NUM_ARGS() != 2)\n\t{\n\t\tereport(ERROR,\n\t\t\t\t(errcode(ERRCODE_SYNTAX_ERROR),\n\t\t\t\t errmsg(\"wrong number of arguments to %s\", \"pg_raise\")));\n\t}\n\n\tif (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, \"ss\",\n\t\t\t\t\t\t\t &level, &level_len,\n\t\t\t\t\t\t\t &message, &message_len) == FAILURE)\n\t{\n\t\tzend_error(E_WARNING, \"cannot parse parameters in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t}\n\n\tif (strcasecmp(level, \"ERROR\") == 0)\n\t\televel = E_ERROR;\n\telse if (strcasecmp(level, \"WARNING\") == 0)\n\t\televel = E_WARNING;\n\telse if (strcasecmp(level, \"NOTICE\") == 0)\n\t\televel = E_NOTICE;\n\telse\n\t\tzend_error(E_ERROR, \"incorrect log level\");\n\n\tzend_error(elevel, \"%s\", message);\n}\n\n/*\n * return_next\n * \t\tAdd a tuple to the current tuplestore\n */\nZEND_FUNCTION(return_next)\n{\n\tMemoryContext\toldcxt;\n\tzval\t *param;\n\tHeapTuple\ttup;\n\tReturnSetInfo *rsi;\n\t\n\t/*\n\t * Disallow use of return_next inside non-SRF functions\n\t */\n\tif (current_fcinfo == NULL || current_fcinfo->flinfo == NULL || \n\t\t!current_fcinfo->flinfo->fn_retset)\n\t\tereport(ERROR,\n\t\t\t\t(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),\n\t\t\t\t errmsg(\"cannot use return_next in functions not declared to \"\n\t\t\t\t\t\t\"return a set\")));\n\n\trsi = (ReturnSetInfo *) current_fcinfo->resultinfo;\n\n\tAssert(current_tupledesc != NULL);\n\tAssert(rsi != NULL);\n\t\n\tif (ZEND_NUM_ARGS() > 1)\n\t\tereport(ERROR,\n\t\t\t\t(errcode(ERRCODE_SYNTAX_ERROR),\n\t\t\t\t errmsg(\"wrong number of arguments to %s\", \"return_next\")));\n\n\tif (ZEND_NUM_ARGS() == 0)\n\t{\n\t\t/* \n\t\t * Called from the 
function declared with RETURNS TABLE \n\t */\n\t\tparam = get_table_arguments(current_attinmeta);\n\t}\n\telse if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, \"z\",\n\t\t\t\t\t\t\t ¶m) == FAILURE)\n\t{\n\t\tzend_error(E_WARNING, \"cannot parse parameters in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t}\n\n\t/* Use the per-query context so that the tuplestore survives */\n\toldcxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory);\n\n\t/* Form the tuple */\n\ttup = plphp_srf_htup_from_zval(param, current_attinmeta, current_memcxt);\n\n\t/* First call? Create the tuplestore. */\n\tif (!current_tuplestore)\n\t\tcurrent_tuplestore = tuplestore_begin_heap(true, false, work_mem);\n\n\t/* Save the tuple and clean up */\n\ttuplestore_puttuple(current_tuplestore, tup);\n\theap_freetuple(tup);\n\n\tMemoryContextSwitchTo(oldcxt);\n}\n\n/*\n * php_SPIresult_destroy\n * \t\tFree the resources allocated by a spi_exec call.\n *\n * This is automatically called when the resource goes out of scope\n * or is overwritten by another resource.\n */\nvoid\nphp_SPIresult_destroy(zend_rsrc_list_entry *rsrc TSRMLS_DC)\n{\n\tphp_SPIresult *res = (php_SPIresult *) rsrc->ptr;\n\n\tif (res->SPI_tuptable != NULL)\n\t\tSPI_freetuptable(res->SPI_tuptable);\n\n\tfree(res);\n}\n\n/* Return an array of TABLE argument values for return_next */\nstatic\nzval *get_table_arguments(AttInMetadata *attinmeta)\n{\n\tzval *retval = NULL;\n\tint\t\ti;\n\t\n\tMAKE_STD_ZVAL(retval);\n\tarray_init(retval);\n\n\tAssert(attinmeta->tupdesc);\n\tAssert(saved_symbol_table != NULL);\n\t/* Extract OUT argument names */\n\tfor (i = 0; i < attinmeta->tupdesc->natts; i++)\n\t{\n\t\tzval \t**val;\n\t\tchar \t*attname;\n\n\t\tAssert(!attinmeta->tupdesc->attrs[i]->attisdropped);\n\n\t\tattname = NameStr(attinmeta->tupdesc->attrs[i]->attname);\n\n\t\tif (zend_hash_find(saved_symbol_table, \n\t\t\t\t\t\t attname, strlen(attname) + 1,\n\t\t\t\t\t\t (void **)&val) == 
SUCCESS)\n\n\t\t\tadd_next_index_zval(retval, *val);\n\t\telse\n\t\t\tadd_next_index_unset(retval);\n\t} \n\treturn retval;\n}\n\n\n/*\n * vim:ts=4:sw=4:cino=(0\n */\n"}, "files_after": {"plphp.c": "/**********************************************************************\n * plphp.c - PHP as a procedural language for PostgreSQL\n *\n * This software is copyright (c) Command Prompt Inc.\n *\n * The author hereby grants permission to use, copy, modify,\n * distribute, and license this software and its documentation for any\n * purpose, provided that existing copyright notices are retained in\n * all copies and that this notice is included verbatim in any\n * distributions. No written agreement, license, or royalty fee is\n * required for any of the authorized uses. Modifications to this\n * software may be copyrighted by their author and need not follow the\n * licensing terms described here, provided that the new terms are\n * clearly indicated on the first page of each file where they apply.\n *\n * IN NO EVENT SHALL THE AUTHOR OR DISTRIBUTORS BE LIABLE TO ANY PARTY\n * FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES\n * ARISING OUT OF THE USE OF THIS SOFTWARE, ITS DOCUMENTATION, OR ANY\n * DERIVATIVES THEREOF, EVEN IF THE AUTHOR HAVE BEEN ADVISED OF THE\n * POSSIBILITY OF SUCH DAMAGE.\n *\n * THE AUTHOR AND DISTRIBUTORS SPECIFICALLY DISCLAIM ANY WARRANTIES,\n * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND\n * NON-INFRINGEMENT. 
THIS SOFTWARE IS PROVIDED ON AN \"AS IS\" BASIS,\n * AND THE AUTHOR AND DISTRIBUTORS HAVE NO OBLIGATION TO PROVIDE\n * MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.\n *\n * IDENTIFICATION\n *\t\t$Id$\n *********************************************************************\n */\n\n\n/* Package configuration generated by autoconf */\n#include \"config.h\"\n\n/* First round of undefs, to eliminate collision between plphp and postgresql\n * definitions\n */\n \n#undef PACKAGE_BUGREPORT\n#undef PACKAGE_NAME\n#undef PACKAGE_STRING\n#undef PACKAGE_TARNAME\n#undef PACKAGE_VERSION\n\n/* PostgreSQL stuff */\n#include \"postgres.h\"\n#include \"access/heapam.h\"\n#include \"access/transam.h\"\n#include \"access/htup_details.h\"\n\n#include \"catalog/catversion.h\"\n#include \"catalog/pg_language.h\"\n#include \"catalog/pg_proc.h\"\n#include \"catalog/pg_type.h\"\n\n#include \"commands/trigger.h\"\n#include \"fmgr.h\"\n#include \"funcapi.h\"\t\t\t/* needed for SRF support */\n#include \"lib/stringinfo.h\"\n\n#include \"utils/array.h\"\n#include \"utils/builtins.h\"\n#include \"utils/elog.h\"\n#include \"utils/lsyscache.h\"\n#include \"utils/memutils.h\"\n#include \"utils/rel.h\"\n#include \"utils/syscache.h\"\n#include \"utils/typcache.h\"\n\n/*\n * These are defined again in php.h, so undef them to avoid some\n * cpp warnings.\n */\n#undef PACKAGE_BUGREPORT\n#undef PACKAGE_NAME\n#undef PACKAGE_STRING\n#undef PACKAGE_TARNAME\n#undef PACKAGE_VERSION\n\n/* PHP stuff */\n#include \"php.h\"\n\n#include \"php_variables.h\"\n#include \"php_globals.h\"\n#include \"zend_hash.h\"\n#include \"zend_modules.h\"\n\n#include \"php_ini.h\"\t\t\t/* needed for INI_HARDCODED */\n#include \"php_main.h\"\n\n/* Our own stuff */\n#include \"plphp_io.h\"\n#include \"plphp_spi.h\"\n\n/* system stuff */\n#if HAVE_FCNTL_H\n#include \n#endif\n#if HAVE_UNISTD_H\n#include \n#endif\n\n#define INI_HARDCODED(name,value) \\\n\t\tzend_alter_ini_entry(name, sizeof(name), value, strlen(value), 
\\\n\t\t\t\t\t\t\t PHP_INI_SYSTEM, PHP_INI_STAGE_ACTIVATE);\n\n/* Check for PostgreSQL version */\n#if (CATALOG_VERSION_NO >= 200709301)\n#define PG_VERSION_83_COMPAT\n#endif\n#if (CATALOG_VERSION_NO >= 200611241)\n#define PG_VERSION_82_COMPAT\n#endif\n/* We only support 8.1 and above */\n#if (CATALOG_VERSION_NO >= 200510211)\n#define PG_VERSION_81_COMPAT\n#else\n#error \"Unsupported PostgreSQL version\"\n#endif\n\n#undef DEBUG_PLPHP_MEMORY\n\n#ifdef DEBUG_PLPHP_MEMORY\n#define REPORT_PHP_MEMUSAGE(where) \\\n\telog(NOTICE, \"PHP mem usage: %s: %u\", where, AG(allocated_memory));\n#else\n#define REPORT_PHP_MEMUSAGE(a) \n#endif\n\n/* PostgreSQL starting from v 8.2 requires this define\n * for all modules.\n */\n#ifdef PG_VERSION_82_COMPAT\nPG_MODULE_MAGIC;\n#else\n/* Supress warnings on 8.1 and below */\n#define ReleaseTupleDesc(tupdesc) \n#endif\n\n/* PHP 5.2 and earlier do not contain these definitions */\n#ifndef Z_SET_ISREF_P\n#define Z_SET_ISREF_P(foo) (foo)->is_ref = 1\n#define Z_UNSET_ISREF_P(foo) (foo)->is_ref = 0\n#endif\n\n/* 8.2 compatibility */\n#ifndef TYPTYPE_PSEUDO\n#define TYPTYPE_PSEUDO 'p'\n#define TYPTYPE_COMPOSITE 'c'\n#endif\n\n/* Check the argument type to expect to accept an initial value */\n#define IS_ARGMODE_OUT(mode) ((mode) == PROARGMODE_OUT || \\\n(mode) == PROARGMODE_TABLE)\n/*\n * Return types. Why on earth is this a bitmask? Beats me.\n * We should have separate flags instead.\n */\ntypedef enum pl_type\n{\n\tPL_TUPLE = 1 << 0,\n\tPL_ARRAY = 1 << 1,\n\tPL_PSEUDO = 1 << 2\n} pl_type;\n\n/*\n * The information we cache about loaded procedures.\n *\n * \"proname\" is the name of the function, given by the user.\n *\n * fn_xmin and fn_cmin are used to know when a function has been redefined and\n * needs to be recompiled.\n *\n * trusted indicates whether the function was created with a trusted handler.\n *\n * ret_type is a weird bitmask that indicates whether this function returns a\n * tuple, an array or a pseudotype. 
ret_oid is the Oid of the return type.\n * retset indicates whether the function was declared to return a set.\n *\n * arg_argmode indicates whether the argument is IN, OUT or both. It follows\n * values in pg_proc.proargmodes.\n *\n * n_out_args - total number of OUT or INOUT arguments.\n * arg_out_tupdesc is a tuple descriptor of the tuple constructed for OUT args.\n *\n * XXX -- maybe this thing needs to be rethought.\n */\ntypedef struct plphp_proc_desc\n{\n\tchar\t *proname;\n\tTransactionId fn_xmin;\n\tCommandId\tfn_cmin;\n\tbool\t\ttrusted;\n\tpl_type\t\tret_type;\n\tOid\t\t\tret_oid;\t\t/* Oid of returning type */\n\tbool\t\tretset;\n\tFmgrInfo\tresult_in_func;\n\tOid\t\t\tresult_typioparam;\n\tint\t\t\tn_out_args;\n\tint\t\t\tn_total_args;\n\tint\t\t\tn_mixed_args;\n\tFmgrInfo\targ_out_func[FUNC_MAX_ARGS];\n\tOid\t\t\targ_typioparam[FUNC_MAX_ARGS];\n\tchar\t\targ_typtype[FUNC_MAX_ARGS];\n\tchar\t\targ_argmode[FUNC_MAX_ARGS];\n\tTupleDesc\targs_out_tupdesc;\n} plphp_proc_desc;\n\n/*\n * Global data\n */\nstatic bool plphp_first_call = true;\nstatic zval *plphp_proc_array = NULL;\n\n/* for PHP write/flush */\nstatic StringInfo currmsg = NULL;\n\n/*\n * for PHP <-> Postgres error message passing\n *\n * XXX -- it would be much better if we could save errcontext,\n * errhint, etc as well.\n */\nstatic char *error_msg = NULL;\n/*\n * Forward declarations\n */\nstatic void plphp_init_all(void);\nvoid\t\tplphp_init(void);\n\nPG_FUNCTION_INFO_V1(plphp_call_handler);\nDatum plphp_call_handler(PG_FUNCTION_ARGS);\n\nPG_FUNCTION_INFO_V1(plphp_validator);\nDatum plphp_validator(PG_FUNCTION_ARGS);\n\nstatic Datum plphp_trigger_handler(FunctionCallInfo fcinfo,\n\t\t\t\t\t\t\t\t plphp_proc_desc *desc\n\t\t\t\t\t\t\t\t TSRMLS_DC);\nstatic Datum plphp_func_handler(FunctionCallInfo fcinfo,\n\t\t\t\t\t\t\t plphp_proc_desc *desc\n\t\t\t\t\t\t\t\tTSRMLS_DC);\nstatic Datum plphp_srf_handler(FunctionCallInfo fcinfo,\n\t\t\t\t\t\t \t plphp_proc_desc *desc\n\t\t\t\t\t\t\t 
TSRMLS_DC);\n\nstatic plphp_proc_desc *plphp_compile_function(Oid fnoid, bool is_trigger TSRMLS_DC);\nstatic zval *plphp_call_php_func(plphp_proc_desc *desc,\n\t\t\t\t\t\t\t\t FunctionCallInfo fcinfo\n\t\t\t\t\t\t\t\t TSRMLS_DC);\nstatic zval *plphp_call_php_trig(plphp_proc_desc *desc,\n\t\t\t\t\t\t\t\t FunctionCallInfo fcinfo, zval *trigdata\n\t\t\t\t\t\t\t\t TSRMLS_DC);\n\nstatic void plphp_error_cb(int type, const char *filename, const uint lineno,\n\t\t\t\t\t\t\t\t const char *fmt, va_list args);\nstatic bool is_valid_php_identifier(char *name);\n\n/*\n * FIXME -- this comment is quite misleading actually, which is not surprising\n * since it came verbatim from PL/pgSQL. Rewrite memory handling here someday\n * and remove it.\n *\n * This routine is a crock, and so is everyplace that calls it. The problem\n * is that the cached form of plphp functions/queries is allocated permanently\n * (mostly via malloc()) and never released until backend exit. Subsidiary\n * data structures such as fmgr info records therefore must live forever\n * as well. A better implementation would store all this stuff in a per-\n * function memory context that could be reclaimed at need. 
In the meantime,\n * fmgr_info_cxt must be called specifying TopMemoryContext so that whatever\n * it might allocate, and whatever the eventual function might allocate using\n * fn_mcxt, will live forever too.\n */\nstatic void\nperm_fmgr_info(Oid functionId, FmgrInfo *finfo)\n{\n\tfmgr_info_cxt(functionId, finfo, TopMemoryContext);\n}\n\n/*\n * sapi_plphp_write\n * \t\tCalled when PHP wants to write something to stdout.\n *\n * We just save the output in a StringInfo until the next Flush call.\n */\nstatic int\nsapi_plphp_write(const char *str, uint str_length TSRMLS_DC)\n{\n\tif (currmsg == NULL)\n\t\tcurrmsg = makeStringInfo();\n\n\tappendStringInfoString(currmsg, str);\n\n\treturn str_length;\n}\n\n/*\n * sapi_plphp_flush\n * \t\tCalled when PHP wants to flush stdout.\n *\n * The stupid PHP implementation calls write and follows with a Flush right\n * away -- a good implementation would write several times and flush when the\n * message is complete. To make the output look reasonable in Postgres, we\n * skip the flushing if the accumulated message does not end in a newline.\n */\nstatic void\nsapi_plphp_flush(void *sth)\n{\n\tif (currmsg != NULL)\n\t{\n\t\tAssert(currmsg->data != NULL);\n\n\t\tif (currmsg->data[currmsg->len - 1] == '\\n')\n\t\t{\n\t\t\t/*\n\t\t\t * remove the trailing newline because elog() inserts another\n\t\t\t * one\n\t\t\t */\n\t\t\tcurrmsg->data[currmsg->len - 1] = '\\0';\n\t\t}\n\t\telog(LOG, \"%s\", currmsg->data);\n\n\t\tpfree(currmsg->data);\n\t\tpfree(currmsg);\n\t\tcurrmsg = NULL;\n\t}\n\telse\n\t\telog(LOG, \"attempting to flush a NULL message\");\n}\n\nstatic int\nsapi_plphp_send_headers(sapi_headers_struct *sapi_headers TSRMLS_DC)\n{\n\treturn 1;\n}\n\nstatic void\nphp_plphp_log_messages(char *message)\n{\n\telog(LOG, \"plphp: %s\", message);\n}\n\n\nstatic sapi_module_struct plphp_sapi_module = {\n\t\"plphp\",\t\t\t\t\t/* name */\n\t\"PL/php PostgreSQL Handler\",/* pretty name */\n\n\tNULL,\t\t\t\t\t\t/* startup 
*/\n\tphp_module_shutdown_wrapper,/* shutdown */\n\n\tNULL,\t\t\t\t\t\t/* activate */\n\tNULL,\t\t\t\t\t\t/* deactivate */\n\n\tsapi_plphp_write,\t\t\t/* unbuffered write */\n\tsapi_plphp_flush,\t\t\t/* flush */\n\tNULL,\t\t\t\t\t\t/* stat */\n\tNULL,\t\t\t\t\t\t/* getenv */\n\n\tphp_error,\t\t\t\t\t/* sapi_error(int, const char *, ...) */\n\n\tNULL,\t\t\t\t\t\t/* header handler */\n\tsapi_plphp_send_headers,\t/* send headers */\n\tNULL,\t\t\t\t\t\t/* send header */\n\n\tNULL,\t\t\t\t\t\t/* read POST */\n\tNULL,\t\t\t\t\t\t/* read cookies */\n\n\tNULL,\t\t\t\t\t\t/* register server variables */\n\tphp_plphp_log_messages,\t\t/* log message */\n\n\tNULL,\t\t\t\t\t\t/* Block interrupts */\n\tNULL,\t\t\t\t\t\t/* Unblock interrupts */\n\tSTANDARD_SAPI_MODULE_PROPERTIES\n};\n\n/*\n * plphp_init_all()\t\t- Initialize all\n *\n * XXX This is called each time a function is invoked.\n */\nstatic void\nplphp_init_all(void)\n{\n\t/* Execute postmaster-startup safe initialization */\n\tif (plphp_first_call)\n\t\tplphp_init();\n\n\t/*\n\t * Any other initialization that must be done each time a new\n\t * backend starts -- currently none.\n\t */\n}\n\n/*\n * This function must not be static, so that it can be used in\n * preload_libraries. 
If it is, it will be called by postmaster;\n * otherwise it will be called by each backend the first time a\n * function is called.\n */\nvoid\nplphp_init(void)\n{\n\tTSRMLS_FETCH();\n\t/* Do initialization only once */\n\tif (!plphp_first_call)\n\t\treturn;\n\n\t/*\n\t * Need a Pg try/catch block to prevent an initialization-\n\t * failure from bringing the whole server down.\n\t */\n\tPG_TRY();\n\t{\n\t\tzend_try\n\t\t{\n\t\t\t/*\n\t\t\t * XXX This is a hack -- we are replacing the error callback in an\n\t\t\t * invasive manner that should not be expected to work on future PHP\n\t\t\t * releases.\n\t\t\t */\n\t\t\tzend_error_cb = plphp_error_cb;\n\n\t\t\t/* Omit HTML tags from output */\n\t\t\tplphp_sapi_module.phpinfo_as_text = 1;\n\t\t\tsapi_startup(&plphp_sapi_module);\n\n\t\t\tif (php_module_startup(&plphp_sapi_module, NULL, 0) == FAILURE)\n\t\t\t\telog(ERROR, \"php_module_startup call failed\");\n\n\t\t\t/* php_module_startup changed it, so put it back */\n\t\t\tzend_error_cb = plphp_error_cb;\n\n\t\t\t/*\n\t\t\t * FIXME -- Figure out what this comment is supposed to mean:\n\t\t\t *\n\t\t\t * There is no way to see if we must call zend_ini_deactivate()\n\t\t\t * since we cannot check if EG(ini_directives) has been initialised\n\t\t\t * because the executor's constructor does not initialize it.\n\t\t\t * Apart from that there seems no need for zend_ini_deactivate() yet.\n\t\t\t * So we error out.\n\t\t\t */\n\n\t\t\t/* Init procedure cache */\n\t\t\tMAKE_STD_ZVAL(plphp_proc_array);\n\t\t\tarray_init(plphp_proc_array);\n\n\t\t\tzend_register_functions(\n#if PHP_MAJOR_VERSION == 5\n\t\t\t\t\t\t\t\t\tNULL,\n#endif\n\t\t\t\t\t\t\t\t\tspi_functions, NULL,\n\t\t\t\t\t\t\t\t\tMODULE_PERSISTENT TSRMLS_CC);\n\n\t\t\tPG(during_request_startup) = true;\n\n\t\t\t/* Set some defaults */\n\t\t\tSG(options) |= SAPI_OPTION_NO_CHDIR;\n\n\t\t\t/* Hard coded defaults which cannot be overwritten in the ini file */\n\t\t\tINI_HARDCODED(\"register_argc_argv\", 
\"0\");\n\t\t\tINI_HARDCODED(\"html_errors\", \"0\");\n\t\t\tINI_HARDCODED(\"implicit_flush\", \"1\");\n\t\t\tINI_HARDCODED(\"max_execution_time\", \"0\");\n\t\t\tINI_HARDCODED(\"max_input_time\", \"-1\");\n\n\t\t\t/*\n\t\t\t * Set memory limit to ridiculously high value. This helps the\n\t\t\t * server not to crash, because the PHP allocator has the really\n\t\t\t * stupid idea of calling exit() if the limit is exceeded.\n\t\t\t */\n\t\t\t{\n\t\t\t\tchar\tlimit[15];\n\n\t\t\t\tsnprintf(limit, sizeof(limit), \"%d\", 1 << 30);\n\t\t\t\tINI_HARDCODED(\"memory_limit\", limit);\n\t\t\t}\n\n\t\t\t/* tell the engine we're in non-html mode */\n\t\t\tzend_uv.html_errors = false;\n\n\t\t\t/* not initialized but needed for several options */\n\t\t\tCG(in_compilation) = false;\n\n\t\t\tEG(uninitialized_zval_ptr) = NULL;\n\n\t\t\tif (php_request_startup(TSRMLS_C) == FAILURE)\n\t\t\t{\n\t\t\t\tSG(headers_sent) = 1;\n\t\t\t\tSG(request_info).no_headers = 1;\n\t\t\t\t/* Use Postgres log */\n\t\t\t\telog(ERROR, \"php_request_startup call failed\");\n\t\t\t}\n\n\t\t\tCG(interactive) = false;\n\t\t\tPG(during_request_startup) = true;\n\n\t\t\t/* Register the resource for SPI_result */\n\t\t\tSPIres_rtype = zend_register_list_destructors_ex(php_SPIresult_destroy,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t NULL, \n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"SPI result\",\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 0);\n\n\t\t\t/* Ok, we're done */\n\t\t\tplphp_first_call = false;\n\t\t}\n\t\tzend_catch\n\t\t{\n\t\t\tplphp_first_call = true;\n\t\t\tif (error_msg)\n\t\t\t{\n\t\t\t\tchar\tstr[1024];\n\n\t\t\t\tstrncpy(str, error_msg, sizeof(str));\n\t\t\t\tpfree(error_msg);\n\t\t\t\terror_msg = NULL;\n\t\t\t\telog(ERROR, \"fatal error during PL/php initialization: %s\",\n\t\t\t\t\t str);\n\t\t\t}\n\t\t\telse\n\t\t\t\telog(ERROR, \"fatal error during PL/php initialization\");\n\t\t}\n\t\tzend_end_try();\n\t}\n\tPG_CATCH();\n\t{\n\t\tPG_RE_THROW();\n\t}\n\tPG_END_TRY();\n}\n\n/*\n * plphp_call_handler\n *\n * The visible 
function of the PL interpreter. The PostgreSQL function manager\n * and trigger manager call this function for execution of php procedures.\n */\nDatum\nplphp_call_handler(PG_FUNCTION_ARGS)\n{\n\tDatum\t\tretval;\n\tTSRMLS_FETCH();\n\n\t/* Initialize interpreter */\n\tplphp_init_all();\n\n\tPG_TRY();\n\t{\n\t\t/* Connect to SPI manager */\n\t\tif (SPI_connect() != SPI_OK_CONNECT)\n\t\t\tereport(ERROR,\n\t\t\t\t\t(errcode(ERRCODE_CONNECTION_FAILURE),\n\t\t\t\t\t errmsg(\"could not connect to SPI manager\")));\n\n\t\tzend_try\n\t\t{\n\t\t\tplphp_proc_desc *desc;\n\n\t\t\t/* Clean up SRF state */\n\t\t\tcurrent_fcinfo = NULL;\n\n\t\t\t/* Redirect to the appropiate handler */\n\t\t\tif (CALLED_AS_TRIGGER(fcinfo))\n\t\t\t{\n\t\t\t\tdesc = plphp_compile_function(fcinfo->flinfo->fn_oid, true TSRMLS_CC);\n\n\t\t\t\t/* Activate PHP safe mode if needed */\n\t\t\t\t//PG(safe_mode) = desc->trusted;\n\n\t\t\t\tretval = plphp_trigger_handler(fcinfo, desc TSRMLS_CC);\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\tdesc = plphp_compile_function(fcinfo->flinfo->fn_oid, false TSRMLS_CC);\n\n\t\t\t\t/* Activate PHP safe mode if needed */\n\t\t\t\t//PG(safe_mode) = desc->trusted;\n\n\t\t\t\tif (desc->retset)\n\t\t\t\t\tretval = plphp_srf_handler(fcinfo, desc TSRMLS_CC);\n\t\t\t\telse\n\t\t\t\t\tretval = plphp_func_handler(fcinfo, desc TSRMLS_CC);\n\t\t\t}\n\t\t}\n\t\tzend_catch\n\t\t{\n\t\t\tREPORT_PHP_MEMUSAGE(\"reporting error\");\n\t\t\tif (error_msg)\n\t\t\t{\n\t\t\t\tchar\tstr[1024];\n\n\t\t\t\tstrncpy(str, error_msg, sizeof(str));\n\t\t\t\tpfree(error_msg);\n\t\t\t\terror_msg = NULL;\n\t\t\t\telog(ERROR, \"%s\", str);\n\t\t\t}\n\t\t\telse\n\t\t\t\telog(ERROR, \"fatal error\");\n\n\t\t\t/* not reached, but keep compiler quiet */\n\t\t\treturn 0;\n\t\t}\n\t\tzend_end_try();\n\t}\n\tPG_CATCH();\n\t{\n\t\tPG_RE_THROW();\n\t}\n\tPG_END_TRY();\n\n\treturn retval;\n}\n\n/*\n * plphp_validator\n *\n * \t\tValidator function for checking the function's syntax at creation\n * \t\ttime\n 
*/\nDatum\nplphp_validator(PG_FUNCTION_ARGS)\n{\n\tOid\t\t\t\tfuncoid = PG_GETARG_OID(0);\n\tForm_pg_proc\tprocForm;\n\tHeapTuple\t\tprocTup;\n\tchar\t\t\ttmpname[32];\n\tchar\t\t\tfuncname[NAMEDATALEN];\n\tchar\t\t *tmpsrc = NULL,\n\t\t\t\t *prosrc;\n\tDatum\t\t\tprosrcdatum;\n\n\n\tTSRMLS_FETCH();\n\t/* Initialize interpreter */\n\tplphp_init_all();\n\n\tPG_TRY();\n\t{\n\t\tbool\t\t\tisnull;\n\t\t/* Grab the pg_proc tuple */\n\t\tprocTup = SearchSysCache(PROCOID,\n\t\t\t\t\t\t\t\t ObjectIdGetDatum(funcoid),\n\t\t\t\t\t\t\t\t 0, 0, 0);\n\t\tif (!HeapTupleIsValid(procTup))\n\t\t\telog(ERROR, \"cache lookup failed for function %u\", funcoid);\n\n\t\tprocForm = (Form_pg_proc) GETSTRUCT(procTup);\n\n\t\t/* Get the function source code */\n\t\tprosrcdatum = SysCacheGetAttr(PROCOID,\n\t\t\t\t\t\t\t\t\t procTup,\n\t\t\t\t\t\t\t\t\t Anum_pg_proc_prosrc,\n\t\t\t\t\t\t\t\t\t &isnull);\n\t\tif (isnull)\n\t\t\telog(ERROR, \"cache lookup yielded NULL prosrc\");\n\t\tprosrc = DatumGetCString(DirectFunctionCall1(textout,\n\t\t\t\t\t\t\t\t\t\t\t\t\t prosrcdatum));\n\n\t\t/* Get the function name, for the error message */\n\t\tStrNCpy(funcname, NameStr(procForm->proname), NAMEDATALEN);\n\n\t\t/* Let go of the pg_proc tuple */\n\t\tReleaseSysCache(procTup);\n\n\t\t/* Create a PHP function creation statement */\n\t\tsnprintf(tmpname, sizeof(tmpname), \"plphp_temp_%u\", funcoid);\n\t\ttmpsrc = (char *) palloc(strlen(prosrc) +\n\t\t\t\t\t\t\t\t strlen(tmpname) +\n\t\t\t\t\t\t\t\t strlen(\"function ($args, $argc){ } \"));\n\t\tsprintf(tmpsrc, \"function %s($args, $argc){%s}\",\n\t\t\t\ttmpname, prosrc);\n\n\t\tpfree(prosrc);\n\n\t\tzend_try\n\t\t{\n\t\t\t/*\n\t\t\t * Delete the function from the PHP function table, just in case it\n\t\t\t * already existed. This is quite unlikely, but still.\n\t\t\t */\n\t\t\tzend_hash_del(CG(function_table), tmpname, strlen(tmpname) + 1);\n\n\t\t\t/*\n\t\t\t * Let the user see the fireworks. 
If the function doesn't validate,\n\t\t\t * the ERROR will be raised and the function will not be created.\n\t\t\t */\n\t\t\tif (zend_eval_string(tmpsrc, NULL,\n\t\t\t\t\t\t\t\t \"plphp function temp source\" TSRMLS_CC) == FAILURE)\n\t\t\t\telog(ERROR, \"function \\\"%s\\\" does not validate\", funcname);\n\n\t\t\tpfree(tmpsrc);\n\t\t\ttmpsrc = NULL;\n\n\t\t\t/* Delete the newly-created function from the PHP function table. */\n\t\t\tzend_hash_del(CG(function_table), tmpname, strlen(tmpname) + 1);\n\t\t}\n\t\tzend_catch\n\t\t{\n\t\t\tif (tmpsrc != NULL)\n\t\t\t\tpfree(tmpsrc);\n\n\t\t\tif (error_msg)\n\t\t\t{\n\t\t\t\tchar\tstr[1024];\n\n\t\t\t\tStrNCpy(str, error_msg, sizeof(str));\n\t\t\t\tpfree(error_msg);\n\t\t\t\terror_msg = NULL;\n\t\t\t\telog(ERROR, \"function \\\"%s\\\" does not validate: %s\", funcname, str);\n\t\t\t}\n\t\t\telse\n\t\t\t\telog(ERROR, \"fatal error\");\n\n\t\t\t/* not reached, but keep compiler quiet */\n\t\t\treturn 0;\n\t\t}\n\t\tzend_end_try();\n\n\t\t/* The result of a validator is ignored */\n\t\tPG_RETURN_VOID();\n\t}\n\tPG_CATCH();\n\t{\n\t\tPG_RE_THROW();\n\t}\n\tPG_END_TRY();\n}\n\n/*\n * plphp_get_function_tupdesc\n *\n * \t\tReturns a TupleDesc of the function's return type.\n */\nstatic TupleDesc\nplphp_get_function_tupdesc(Oid result_type, Node *rsinfo)\n{\n\tif (result_type == RECORDOID)\n\t{\n\t\tReturnSetInfo *rs = (ReturnSetInfo *) rsinfo;\n\t\t/* We must get the information from call context */\n\t\tif (!rsinfo || !IsA(rsinfo, ReturnSetInfo) || rs->expectedDesc == NULL)\n\t\t\tereport(ERROR,\n\t\t\t\t\t(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),\n\t\t\t\t\t errmsg(\"function returning record called in context \"\n\t\t\t\t\t\t\t\"that cannot accept type record\")));\n\t\treturn rs->expectedDesc;\n\t}\n\telse\n\t\t/* ordinary composite type */\n\t\treturn lookup_rowtype_tupdesc(result_type, -1);\n}\n\n\n/*\n * Build the $_TD array for the trigger function.\n */\nstatic zval *\nplphp_trig_build_args(FunctionCallInfo 
fcinfo)\n{\n\tTriggerData\t *tdata;\n\tTupleDesc\t\ttupdesc;\n\tzval\t\t *retval;\n\tint\t\t\t\ti;\n\n\tMAKE_STD_ZVAL(retval);\n\tarray_init(retval);\n\n\ttdata = (TriggerData *) fcinfo->context;\n\ttupdesc = tdata->tg_relation->rd_att;\n\n\t/* The basic variables */\n\tadd_assoc_string(retval, \"name\", tdata->tg_trigger->tgname, 1);\n add_assoc_long(retval, \"relid\", tdata->tg_relation->rd_id);\n\tadd_assoc_string(retval, \"relname\", SPI_getrelname(tdata->tg_relation), 1);\n\tadd_assoc_string(retval, \"schemaname\", SPI_getnspname(tdata->tg_relation), 1);\n\n\t/* EVENT */\n\tif (TRIGGER_FIRED_BY_INSERT(tdata->tg_event))\n\t\tadd_assoc_string(retval, \"event\", \"INSERT\", 1);\n\telse if (TRIGGER_FIRED_BY_DELETE(tdata->tg_event))\n\t\tadd_assoc_string(retval, \"event\", \"DELETE\", 1);\n\telse if (TRIGGER_FIRED_BY_UPDATE(tdata->tg_event))\n\t\tadd_assoc_string(retval, \"event\", \"UPDATE\", 1);\n\telse\n\t\telog(ERROR, \"unknown firing event for trigger function\");\n\n\t/* NEW and OLD as appropiate */\n\tif (TRIGGER_FIRED_FOR_ROW(tdata->tg_event))\n\t{\n\t\tif (TRIGGER_FIRED_BY_INSERT(tdata->tg_event))\n\t\t{\n\t\t\tzval\t *hashref;\n\n\t\t\thashref = plphp_build_tuple_argument(tdata->tg_trigtuple, tupdesc);\n\t\t\tadd_assoc_zval(retval, \"new\", hashref);\n\t\t}\n\t\telse if (TRIGGER_FIRED_BY_DELETE(tdata->tg_event))\n\t\t{\n\t\t\tzval\t *hashref;\n\n\t\t\thashref = plphp_build_tuple_argument(tdata->tg_trigtuple, tupdesc);\n\t\t\tadd_assoc_zval(retval, \"old\", hashref);\n\t\t}\n\t\telse if (TRIGGER_FIRED_BY_UPDATE(tdata->tg_event))\n\t\t{\n\t\t\tzval\t *hashref;\n\n\t\t\thashref = plphp_build_tuple_argument(tdata->tg_newtuple, tupdesc);\n\t\t\tadd_assoc_zval(retval, \"new\", hashref);\n\n\t\t\thashref = plphp_build_tuple_argument(tdata->tg_trigtuple, tupdesc);\n\t\t\tadd_assoc_zval(retval, \"old\", hashref);\n\t\t}\n\t\telse\n\t\t\telog(ERROR, \"unknown firing event for trigger function\");\n\t}\n\n\t/* ARGC and ARGS */\n\tadd_assoc_long(retval, \"argc\", 
tdata->tg_trigger->tgnargs);\n\n\tif (tdata->tg_trigger->tgnargs > 0)\n\t{\n\t\tzval\t *hashref;\n\n\t\tMAKE_STD_ZVAL(hashref);\n\t\tarray_init(hashref);\n\n\t\tfor (i = 0; i < tdata->tg_trigger->tgnargs; i++)\n\t\t\tadd_index_string(hashref, i, tdata->tg_trigger->tgargs[i], 1);\n\n\t\tzend_hash_update(retval->value.ht, \"args\", strlen(\"args\") + 1,\n\t\t\t\t\t\t (void *) &hashref, sizeof(zval *), NULL);\n\t}\n\n\t/* WHEN */\n\tif (TRIGGER_FIRED_BEFORE(tdata->tg_event))\n\t\tadd_assoc_string(retval, \"when\", \"BEFORE\", 1);\n\telse if (TRIGGER_FIRED_AFTER(tdata->tg_event))\n\t\tadd_assoc_string(retval, \"when\", \"AFTER\", 1);\n\telse\n\t\telog(ERROR, \"unknown firing time for trigger function\");\n\n\t/* LEVEL */\n\tif (TRIGGER_FIRED_FOR_ROW(tdata->tg_event))\n\t\tadd_assoc_string(retval, \"level\", \"ROW\", 1);\n\telse if (TRIGGER_FIRED_FOR_STATEMENT(tdata->tg_event))\n\t\tadd_assoc_string(retval, \"level\", \"STATEMENT\", 1);\n\telse\n\t\telog(ERROR, \"unknown firing level for trigger function\");\n\n\treturn retval;\n}\n\n/*\n * plphp_trigger_handler\n * \t\tHandler for trigger function calls\n */\nstatic Datum\nplphp_trigger_handler(FunctionCallInfo fcinfo, plphp_proc_desc *desc TSRMLS_DC)\n{\n\tDatum\t\tretval = 0;\n\tchar\t *srv;\n\tzval\t *phpret,\n\t\t\t *zTrigData;\n\tTriggerData *trigdata;\n\n\tREPORT_PHP_MEMUSAGE(\"going to build the trigger arg\");\n\n\tzTrigData = plphp_trig_build_args(fcinfo);\n\n\tREPORT_PHP_MEMUSAGE(\"going to call the trigger function\");\n\n\tphpret = plphp_call_php_trig(desc, fcinfo, zTrigData TSRMLS_CC);\n\tif (!phpret)\n\t\telog(ERROR, \"error during execution of function %s\", desc->proname);\n\n\tREPORT_PHP_MEMUSAGE(\"trigger called, going to build the return value\");\n\n\t/*\n\t * Disconnect from SPI manager and then create the return values datum (if\n\t * the input function does a palloc for it this must not be allocated in\n\t * the SPI memory context because SPI_finish would free it).\n\t */\n\tif (SPI_finish() != 
SPI_OK_FINISH)\n\t\tereport(ERROR,\n\t\t\t\t(errcode(ERRCODE_CONNECTION_DOES_NOT_EXIST),\n\t\t\t\t errmsg(\"could not disconnect from SPI manager\")));\n\n\ttrigdata = (TriggerData *) fcinfo->context;\n\n\tif (zTrigData->type != IS_ARRAY)\n\t\telog(ERROR, \"$_TD is not an array\");\n\t\t\t \n\t/*\n\t * In a BEFORE trigger, compute the return value. In an AFTER trigger\n\t * it'll be ignored, so don't bother.\n\t */\n\tif (TRIGGER_FIRED_BEFORE(trigdata->tg_event))\n\t{\n\t\tswitch (phpret->type)\n\t\t{\n\t\t\tcase IS_STRING:\n\t\t\t\tsrv = phpret->value.str.val;\n\t\t\t\tif (strcasecmp(srv, \"SKIP\") == 0)\n\t\t\t\t{\n\t\t\t\t\t/* do nothing */\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t\telse if (strcasecmp(srv, \"MODIFY\") == 0)\n\t\t\t\t{\n\t\t\t\t\tif (TRIGGER_FIRED_BY_INSERT(trigdata->tg_event) ||\n\t\t\t\t\t\tTRIGGER_FIRED_BY_UPDATE(trigdata->tg_event))\n\t\t\t\t\t\tretval = PointerGetDatum(plphp_modify_tuple(zTrigData,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttrigdata));\n\t\t\t\t\telse if (TRIGGER_FIRED_BY_DELETE(trigdata->tg_event))\n\t\t\t\t\t\tereport(ERROR,\n\t\t\t\t\t\t\t\t(errcode(ERRCODE_SYNTAX_ERROR),\n\t\t\t\t\t\t\t\t errmsg(\"on delete trigger can not modify the the return tuple\")));\n\t\t\t\t\telse\n\t\t\t\t\t\telog(ERROR, \"unknown event in trigger function\");\n\t\t\t\t}\n\t\t\t\telse\n\t\t\t\t\tereport(ERROR,\n\t\t\t\t\t\t\t(errcode(ERRCODE_SYNTAX_ERROR),\n\t\t\t\t\t\t\t errmsg(\"expected trigger function to return NULL, 'SKIP' or 'MODIFY'\")));\n\t\t\t\tbreak;\n\t\t\tcase IS_NULL:\n\t\t\t\tif (TRIGGER_FIRED_BY_INSERT(trigdata->tg_event) ||\n\t\t\t\t\tTRIGGER_FIRED_BY_DELETE(trigdata->tg_event))\n\t\t\t\t\tretval = (Datum) trigdata->tg_trigtuple;\n\t\t\t\telse if (TRIGGER_FIRED_BY_UPDATE(trigdata->tg_event))\n\t\t\t\t\tretval = (Datum) trigdata->tg_newtuple;\n\t\t\t\tbreak;\n\t\t\tdefault:\n\t\t\t\tereport(ERROR,\n\t\t\t\t\t\t(errcode(ERRCODE_SYNTAX_ERROR),\n\t\t\t\t\t\t errmsg(\"expected trigger function to return NULL, 'SKIP' or 
'MODIFY'\")));\n\t\t\t\tbreak;\n\t\t}\n\t}\n\n\tREPORT_PHP_MEMUSAGE(\"freeing some variables\");\n\n\tzval_dtor(zTrigData);\n\tzval_dtor(phpret);\n\n\tFREE_ZVAL(phpret);\n\tFREE_ZVAL(zTrigData);\n\n\tREPORT_PHP_MEMUSAGE(\"trigger call done\");\n\n\treturn retval;\n}\n\n/*\n * plphp_func_handler\n * \t\tHandler for regular function calls\n */\nstatic Datum\nplphp_func_handler(FunctionCallInfo fcinfo, plphp_proc_desc *desc TSRMLS_DC)\n{\n\tzval\t *phpret = NULL;\n\tDatum\t\tretval;\n\tchar\t *retvalbuffer = NULL;\n\n\t/* SRFs are handled separately */\n\tAssert(!desc->retset);\n\n\t/* Call the PHP function. */\n\tphpret = plphp_call_php_func(desc, fcinfo TSRMLS_CC);\n\tif (!phpret)\n\t\telog(ERROR, \"error during execution of function %s\", desc->proname);\n\n\tREPORT_PHP_MEMUSAGE(\"function invoked\");\n\n\t/* Basic datatype checks */\n\tif ((desc->ret_type & PL_ARRAY) && phpret->type != IS_ARRAY)\n\t\tereport(ERROR,\n\t\t\t\t(errcode(ERRCODE_DATATYPE_MISMATCH),\n\t\t\t\t errmsg(\"function declared to return array must return an array\")));\n\tif ((desc->ret_type & PL_TUPLE) && phpret->type != IS_ARRAY)\n\t\tereport(ERROR,\n\t\t\t\t(errcode(ERRCODE_DATATYPE_MISMATCH),\n\t\t\t\t errmsg(\"function declared to return tuple must return an array\")));\n\n\t/*\n\t * Disconnect from SPI manager and then create the return values datum (if\n\t * the input function does a palloc for it this must not be allocated in\n\t * the SPI memory context because SPI_finish would free it).\n\t */\n\tif (SPI_finish() != SPI_OK_FINISH)\n\t\tereport(ERROR,\n\t\t\t\t(errcode(ERRCODE_CONNECTION_DOES_NOT_EXIST),\n\t\t\t\t errmsg(\"could not disconnect from SPI manager\")));\n\tretval = (Datum) 0;\n\n\tif (desc->ret_type & PL_PSEUDO)\n\t{\n\t\tHeapTuple\tretTypeTup;\n\t\tForm_pg_type retTypeStruct;\n\n\t\tretTypeTup = SearchSysCache(TYPEOID,\n\t\t\t\t\t\t\t\t\tObjectIdGetDatum(get_fn_expr_rettype(fcinfo->flinfo)),\n\t\t\t\t\t\t\t\t\t0, 0, 0);\n\t\tretTypeStruct = (Form_pg_type) 
GETSTRUCT(retTypeTup);\n\t\tperm_fmgr_info(retTypeStruct->typinput, &(desc->result_in_func));\n\t\tdesc->result_typioparam = retTypeStruct->typelem;\n\t\tReleaseSysCache(retTypeTup);\n\t}\n\n\tif (phpret)\n\t{\n\t\tswitch (Z_TYPE_P(phpret))\n\t\t{\n\t\t\tcase IS_NULL:\n\t\t\t\tfcinfo->isnull = true;\n\t\t\t\tbreak;\n\t\t\tcase IS_BOOL:\n\t\t\tcase IS_DOUBLE:\n\t\t\tcase IS_LONG:\n\t\t\tcase IS_STRING:\n\t\t\t\tretvalbuffer = plphp_zval_get_cstring(phpret, false, false);\n\t\t\t\tretval = CStringGetDatum(retvalbuffer);\n\t\t\t\tbreak;\n\t\t\tcase IS_ARRAY:\n\t\t\t\tif (desc->ret_type & PL_ARRAY)\n\t\t\t\t{\n\t\t\t\t\tretvalbuffer = plphp_convert_to_pg_array(phpret);\n\t\t\t\t\tretval = CStringGetDatum(retvalbuffer);\n\t\t\t\t}\n\t\t\t\telse if (desc->ret_type & PL_TUPLE)\n\t\t\t\t{\n\t\t\t\t\tTupleDesc\ttd;\n\t\t\t\t\tHeapTuple\ttup;\n\n\t\t\t\t\tif (desc->ret_type & PL_PSEUDO)\n\t\t\t\t\t\ttd = plphp_get_function_tupdesc(desc->ret_oid,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tfcinfo->resultinfo);\n\t\t\t\t\telse\n\t\t\t\t\t\ttd = lookup_rowtype_tupdesc(desc->ret_oid, (int32) -1);\n\n\t\t\t\t\tif (!td)\n\t\t\t\t\t\telog(ERROR, \"no TupleDesc info available\");\n\n\t\t\t\t\ttup = plphp_htup_from_zval(phpret, td);\n\t\t\t\t\tretval = HeapTupleGetDatum(tup);\n\t\t\t\t\tReleaseTupleDesc(td);\n\t\t\t\t}\n\t\t\t\telse\n\t\t\t\t\t/* FIXME -- should return the thing as a string? 
*/\n\t\t\t\t\telog(ERROR, \"this plphp function cannot return arrays\");\n\t\t\t\tbreak;\n\t\t\tdefault:\n\t\t\t\telog(WARNING,\n\t\t\t\t\t \"plphp functions cannot return type %i\",\n\t\t\t\t\t phpret->type);\n\t\t\t\tfcinfo->isnull = true;\n\t\t\t\tbreak;\n\t\t}\n\t}\n\telse\n\t{\n\t\tfcinfo->isnull = true;\n\t\tretval = (Datum) 0;\n\t}\n\n\tif (!fcinfo->isnull && !(desc->ret_type & PL_TUPLE))\n\t{\n\t\tretval = FunctionCall3(&desc->result_in_func,\n\t\t\t\t\t\t\t PointerGetDatum(retvalbuffer),\n\t\t\t\t\t\t\t ObjectIdGetDatum(desc->result_typioparam),\n\t\t\t\t\t\t\t Int32GetDatum(-1));\n\t\tpfree(retvalbuffer);\n\t}\n\n\tREPORT_PHP_MEMUSAGE(\"finished calling user function\");\n\n\treturn retval;\n}\n\n/*\n * plphp_srf_handler\n * \t\tInvoke a SRF\n */\nstatic Datum\nplphp_srf_handler(FunctionCallInfo fcinfo, plphp_proc_desc *desc TSRMLS_DC)\n{\n\tReturnSetInfo *rsi = (ReturnSetInfo *) fcinfo->resultinfo;\n\tTupleDesc\ttupdesc;\n\tzval\t *phpret;\n\tMemoryContext\toldcxt;\n\n\tAssert(desc->retset);\n\n\tcurrent_fcinfo = fcinfo;\n\tcurrent_tuplestore = NULL;\n\n\t/* Check context before allowing the call to go through */\n\tif (!rsi || !IsA(rsi, ReturnSetInfo) ||\n\t\t(rsi->allowedModes & SFRM_Materialize) == 0 ||\n\t\trsi->expectedDesc == NULL)\n\t\tereport(ERROR,\n\t\t\t\t(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),\n\t\t\t\t errmsg(\"set-valued function called in context that \"\n\t\t\t\t\t\t\"cannot accept a set\")));\n\n\t/*\n\t * Fetch the function's tuple descriptor. 
This will return NULL in the\n\t * case of a scalar return type, in which case we will copy the TupleDesc\n\t * from the ReturnSetInfo.\n\t */\n\tget_call_result_type(fcinfo, NULL, &tupdesc);\n\tif (tupdesc == NULL)\n\t\ttupdesc = rsi->expectedDesc;\n\n\t/*\n\t * If the expectedDesc is NULL, bail out, because most likely it's using\n\t * IN/OUT parameters.\n\t */\n\tif (tupdesc == NULL)\n\t\tereport(ERROR,\n\t\t\t\t(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),\n\t\t\t\t errmsg(\"cannot use IN/OUT parameters in PL/php\")));\n\n\toldcxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory);\n\n\t/* This context is reset once per row in return_next */\n\tcurrent_memcxt = AllocSetContextCreate(CurTransactionContext,\n\t\t\t\t\t\t\t\t\t\t \"PL/php SRF context\",\n\t\t\t\t\t\t\t\t\t\t ALLOCSET_DEFAULT_MINSIZE,\n\t\t\t\t\t\t\t\t\t\t ALLOCSET_DEFAULT_INITSIZE,\n\t\t\t\t\t\t\t\t\t\t ALLOCSET_DEFAULT_MAXSIZE);\n\n\t/* Tuple descriptor and AttInMetadata for return_next */\n\tcurrent_tupledesc = CreateTupleDescCopy(tupdesc);\n\tcurrent_attinmeta = TupleDescGetAttInMetadata(current_tupledesc);\n\n\t/*\n\t * Call the PHP function. The user code must call return_next, which will\n\t * create and populate the tuplestore appropiately.\n\t */\n\tphpret = plphp_call_php_func(desc, fcinfo TSRMLS_CC);\n\n\t/* We don't use the return value */\n\tzval_dtor(phpret);\n\n\t/* Close the SPI connection */\n\tif (SPI_finish() != SPI_OK_FINISH)\n\t\tereport(ERROR,\n\t\t\t\t(errcode(ERRCODE_CONNECTION_DOES_NOT_EXIST),\n\t\t\t\t errmsg(\"could not disconnect from SPI manager\")));\n\n\t/* Now prepare the return values. 
*/\n\trsi->returnMode = SFRM_Materialize;\n\n\tif (current_tuplestore)\n\t{\n\t\trsi->setResult = current_tuplestore;\n\t\trsi->setDesc = current_tupledesc;\n\t}\n\n\tMemoryContextDelete(current_memcxt);\n\tcurrent_memcxt = NULL;\n\tcurrent_tupledesc = NULL;\n\tcurrent_attinmeta = NULL;\n\n\tMemoryContextSwitchTo(oldcxt);\n\n\t/* All done */\n\treturn (Datum) 0;\n}\n\n/*\n * plphp_compile_function\n *\n * \t\tCompile (or hopefully just look up) function\n */\nstatic plphp_proc_desc *\nplphp_compile_function(Oid fnoid, bool is_trigger TSRMLS_DC)\n{\n\tHeapTuple\tprocTup;\n\tForm_pg_proc procStruct;\n\tchar\t\tinternal_proname[64];\n\tplphp_proc_desc *prodesc = NULL;\n\tint\t\t\ti;\n\tchar\t *pointer = NULL;\n\n\t/*\n\t * We'll need the pg_proc tuple in any case... \n\t */\n\tprocTup = SearchSysCache(PROCOID, ObjectIdGetDatum(fnoid), 0, 0, 0);\n\tif (!HeapTupleIsValid(procTup))\n\t\telog(ERROR, \"cache lookup failed for function %u\", fnoid);\n\tprocStruct = (Form_pg_proc) GETSTRUCT(procTup);\n\n\t/*\n\t * Build our internal procedure name from the function's Oid\n\t */\n\tif (is_trigger)\n\t\tsnprintf(internal_proname, sizeof(internal_proname),\n\t\t\t\t \"plphp_proc_%u_trigger\", fnoid);\n\telse\n\t\tsnprintf(internal_proname, sizeof(internal_proname),\n\t\t\t\t \"plphp_proc_%u\", fnoid);\n\n\t/*\n\t * Look up the internal proc name in the hashtable\n\t */\n\tpointer = plphp_zval_get_cstring(plphp_array_get_elem(plphp_proc_array,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t internal_proname),\n\t\t\t\t\t\t\t\t\t false, true);\n\tif (pointer)\n\t{\n\t\tbool uptodate;\n\t\tsscanf(pointer, \"%p\", &prodesc);\n\n#ifdef PG_VERSION_83_COMPAT\n\t\t/* PostgreSQL 8.3 doesn't allow calling GetCmin if a tuple doesn't\n\t\t * originate from the current transaction.\n\t\t */\n\t\tuptodate =\n\t\t\t(prodesc->fn_xmin == HeapTupleHeaderGetXmin(procTup->t_data) &&\n\t\t\t prodesc->fn_cmin == HeapTupleHeaderGetRawCommandId(procTup->t_data));\n\n#else\n\t\tuptodate =\n\t\t\t(prodesc->fn_xmin == 
HeapTupleHeaderGetXmin(procTup->t_data) &&\n\t\t\t prodesc->fn_cmin == HeapTupleHeaderGetCmin(procTup->t_data));\n\n#endif\n\n\n\t\t/* We need to delete the old entry */\n\t\tif (!uptodate)\n\t\t{\n\t\t\t/*\n\t\t\t * FIXME -- use a per-function memory context and fix this\n\t\t\t * stuff for good\n\t\t\t */\n\t\t\tfree(prodesc->proname);\n\t\t\tfree(prodesc);\n\t\t\tprodesc = NULL;\n\t\t}\n\t}\n\n\tif (prodesc == NULL)\n\t{\n\t\tHeapTuple\tlangTup;\n\t\tForm_pg_language langStruct;\n\t\tDatum\t\tprosrcdatum;\n\t\tbool\t\tisnull;\n\t\tchar\t *proc_source;\n\t\tchar\t *complete_proc_source;\n\t\tchar\t *pointer = NULL;\n\t\tchar\t *aliases = NULL;\n\t\tchar\t *out_aliases = NULL;\n\t\tchar\t *out_return_str = NULL;\n\t\tint16\ttyplen;\n\t\tchar\ttypbyval,\n\t\t\t\ttypalign,\n\t\t\t\ttyptype,\n\t\t\t\ttypdelim;\n\t\tOid\t\ttypioparam,\n\t\t\t\ttypinput,\n\t\t\t\ttypoutput;\n\t\t/*\n\t\t * Allocate a new procedure description block\n\t\t */\n\t\tprodesc = (plphp_proc_desc *) malloc(sizeof(plphp_proc_desc));\n\t\tif (!prodesc)\n\t\t\tereport(ERROR,\n\t\t\t\t\t(errcode(ERRCODE_OUT_OF_MEMORY),\n\t\t\t\t\t errmsg(\"out of memory\")));\n\n\t\tMemSet(prodesc, 0, sizeof(plphp_proc_desc));\n\t\tprodesc->proname = strdup(internal_proname);\n\t\tif (!prodesc->proname)\n\t\t{\n\t\t\tfree(prodesc);\n\t\t\tereport(ERROR,\n\t\t\t\t\t(errcode(ERRCODE_OUT_OF_MEMORY),\n\t\t\t\t\t errmsg(\"out of memory\")));\n\t\t}\n\n\t\tprodesc->fn_xmin = HeapTupleHeaderGetXmin(procTup->t_data);\n\n#ifdef PG_VERSION_83_COMPAT\n\t\t/* PostgreSQL 8.3 doesn't allow calling GetCmin if a tuple doesn't\n\t\t * originate from the current transaction.\n\t\t */\n\t\tprodesc->fn_cmin = HeapTupleHeaderGetRawCommandId(procTup->t_data);\n\n#else\n\t\tprodesc->fn_cmin = HeapTupleHeaderGetCmin(procTup->t_data);\n\n#endif\n\n\t\t/*\n\t\t * Look up the pg_language tuple by Oid\n\t\t */\n\t\tlangTup = SearchSysCache(LANGOID,\n\t\t\t\t\t\t\t\t ObjectIdGetDatum(procStruct->prolang),\n\t\t\t\t\t\t\t\t 0, 0, 0);\n\t\tif 
(!HeapTupleIsValid(langTup))\n\t\t{\n\t\t\tfree(prodesc->proname);\n\t\t\tfree(prodesc);\n\t\t\telog(ERROR, \"cache lookup failed for language %u\",\n\t\t\t\t\t procStruct->prolang);\n\t\t}\n\t\tlangStruct = (Form_pg_language) GETSTRUCT(langTup);\n\t\tprodesc->trusted = langStruct->lanpltrusted;\n\t\tReleaseSysCache(langTup);\n\n\t\t/*\n\t\t * Get the required information for input conversion of the return\n\t\t * value, and output conversion of the procedure's arguments.\n\t\t */\n\t\tif (!is_trigger)\n\t\t{\n\t\t\tchar **argnames;\n\t\t\tchar *argmodes;\n\t\t\tOid *argtypes;\n\t\t\tint32\talias_str_end,\n\t\t\t\t\tout_str_end;\n\n\t\t\ttyptype = get_typtype(procStruct->prorettype);\n\t\t\tget_type_io_data(procStruct->prorettype,\n\t\t\t\t\t\t\t IOFunc_input,\n\t\t\t\t\t\t\t &typlen,\n\t\t\t\t\t\t\t &typbyval,\n\t\t\t\t\t\t\t &typalign,\n\t\t\t\t\t\t\t &typdelim,\n\t\t\t\t\t\t\t &typioparam,\n\t\t\t\t\t\t\t &typinput);\n\n\t\t\t/*\n\t\t\t * Disallow pseudotype result, except:\n\t\t\t * VOID, RECORD, ANYELEMENT or ANYARRAY\n\t\t\t */\n\t\t\tif (typtype == TYPTYPE_PSEUDO)\n\t\t\t{\n\t\t\t\tif ((procStruct->prorettype == VOIDOID) ||\n\t\t\t\t\t(procStruct->prorettype == RECORDOID) ||\n\t\t\t\t\t(procStruct->prorettype == ANYELEMENTOID) ||\n\t\t\t\t\t(procStruct->prorettype == ANYARRAYOID))\n\t\t\t\t{\n\t\t\t\t\t/* okay */\n\t\t\t\t\tprodesc->ret_type |= PL_PSEUDO;\n\t\t\t\t}\n\t\t\t\telse if (procStruct->prorettype == TRIGGEROID)\n\t\t\t\t{\n\t\t\t\t\tfree(prodesc->proname);\n\t\t\t\t\tfree(prodesc);\n\t\t\t\t\tereport(ERROR,\n\t\t\t\t\t\t\t(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),\n\t\t\t\t\t\t\t errmsg(\"trigger functions may only be called \"\n\t\t\t\t\t\t\t\t\t\"as triggers\")));\n\t\t\t\t}\n\t\t\t\telse\n\t\t\t\t{\n\t\t\t\t\tfree(prodesc->proname);\n\t\t\t\t\tfree(prodesc);\n\t\t\t\t\tereport(ERROR,\n\t\t\t\t\t\t\t(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),\n\t\t\t\t\t\t\t errmsg(\"plphp functions cannot return type 
%s\",\n\t\t\t\t\t\t\t\t\tformat_type_be(procStruct->prorettype))));\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tprodesc->ret_oid = procStruct->prorettype;\n\t\t\tprodesc->retset = procStruct->proretset;\n\n\t\t\tif (typtype == TYPTYPE_COMPOSITE ||\n\t\t\t\tprocStruct->prorettype == RECORDOID)\n\t\t\t{\n\t\t\t\tprodesc->ret_type |= PL_TUPLE;\n\t\t\t}\n\n\t\t\tif (procStruct->prorettype == ANYARRAYOID)\n\t\t\t\tprodesc->ret_type |= PL_ARRAY;\n\t\t\telse\n\t\t\t{\n\t\t\t\t/* function returns a normal (declared) array */\n\t\t\t\tif (typlen == -1 && get_element_type(procStruct->prorettype))\n\t\t\t\t\tprodesc->ret_type |= PL_ARRAY;\n\t\t\t}\n\n\t\t\tperm_fmgr_info(typinput, &(prodesc->result_in_func));\n\t\t\tprodesc->result_typioparam = typioparam;\n\n\t\t\t/* Deal with named arguments, OUT, IN/OUT and TABLE arguments */\n\n\t\t\tprodesc->n_total_args = get_func_arg_info(procTup, &argtypes, \n\t\t\t\t\t\t\t\t\t\t\t \t\t &argnames, &argmodes);\n\t\t\tprodesc->n_out_args = 0;\n\t\t\tprodesc->n_mixed_args = 0;\n\t\t\t\n\t\t\tprodesc->args_out_tupdesc = NULL;\n\t\t\tout_return_str = NULL;\n\t\t\talias_str_end = out_str_end = 0;\n\n\t\t\t/* Count the number of OUT arguments. 
Need to do this out of the\n\t\t\t * main loop, to correctly determine the object to return for OUT args\n\t\t */\n\t\t\tif (argmodes)\n\t\t\t\tfor (i = 0; i < prodesc->n_total_args; i++)\n\t\t\t\t{\n\t\t\t\t\tswitch(argmodes[i])\n\t\t\t\t\t{\n\t\t\t\t\t\tcase PROARGMODE_OUT: \n\t\t\t\t\t\t\tprodesc->n_out_args++;\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\tcase PROARGMODE_INOUT: \n\t\t\t\t\t\t\tprodesc->n_mixed_args++;\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\tcase PROARGMODE_IN:\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\tcase PROARGMODE_TABLE:\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\tcase PROARGMODE_VARIADIC:\n\t\t\t\t\t\t\telog(ERROR, \"VARIADIC arguments are not supported\");\n\t\t\t\t\t\tdefault:\n\t\t\t\t\t\t\telog(ERROR, \"Unsupported type %c for argument no %d\",\n\t\t\t\t\t\t\t\t argmodes[i], i);\n\t\t\t\t\t}\t\t\t\t\t\n\t\t\t\t\tprodesc->arg_argmode[i] = argmodes[i];\n\t\t\t\t}\n\t\t\telse\n\t\t\t\tMemSet(prodesc->arg_argmode, PROARGMODE_IN,\n\t\t\t\t \t prodesc->n_total_args);\n\n\t\t\t/* Allocate memory for argument names unless all of them are OUT*/\n\t\t\tif (argnames && prodesc->n_total_args > 0)\n\t\t\t\taliases = palloc((NAMEDATALEN + 32) * prodesc->n_total_args);\n\t\t\t\n\t\t\t/* Main argument processing loop. 
*/\n\t\t\tfor (i = 0; i < prodesc->n_total_args; i++)\n\t\t\t{\n\t\t\t\tprodesc->arg_typtype[i] = get_typtype(argtypes[i]);\n\t\t\t\tif (prodesc->arg_typtype[i] != TYPTYPE_COMPOSITE)\n\t\t\t\t{\t\t\t\t\t\t\t\n\t\t\t\t\tget_type_io_data(argtypes[i],\n\t\t\t\t\t\t\t\t\t IOFunc_output,\n\t\t\t\t\t\t\t\t\t &typlen,\n\t\t\t\t\t\t\t\t\t &typbyval,\n\t\t\t\t\t\t\t\t\t &typalign,\n\t\t\t\t\t\t\t\t\t &typdelim,\n\t\t\t\t\t\t\t\t\t &typioparam,\n\t\t\t\t\t\t\t\t\t &typoutput);\n\t\t\t\t\tperm_fmgr_info(typoutput, &(prodesc->arg_out_func[i]));\n\t\t\t\t\tprodesc->arg_typioparam[i] = typioparam;\n\t\t\t\t}\n\t\t\t\tif (aliases && argnames[i][0] != '\\0')\n\t\t\t\t{\n\t\t\t\t\tif (!is_valid_php_identifier(argnames[i]))\n\t\t\t\t\t\telog(ERROR, \"\\\"%s\\\" can not be used as a PHP variable name\",\n\t\t\t\t\t\t\t argnames[i]);\n\t\t\t\t\t/* Deal with argument name */\n\t\t\t\t\talias_str_end += snprintf(aliases + alias_str_end,\n\t\t\t\t\t\t\t\t\t\t \t NAMEDATALEN + 32,\n\t\t\t\t\t\t\t\t \t\t \t \" $%s = &$args[%d];\", \n\t\t\t\t\t\t\t\t\t\t\t argnames[i], i);\n\t\t\t\t}\n\t\t\t\tif ((prodesc->arg_argmode[i] == PROARGMODE_OUT ||\n\t\t\t\t\t prodesc->arg_argmode[i] == PROARGMODE_INOUT) && !prodesc->retset)\n\t\t\t\t{\n\t\t\t\t\t/* Initialiazation for OUT arguments aliases */\n\t\t\t\t\tif (!out_return_str)\n\t\t\t\t\t{\n\t\t\t\t\t\t/* Generate return statment for a single OUT argument */\n\t\t\t\t\t\tout_return_str = palloc(NAMEDATALEN + 32);\n\t\t\t\t\t\tif (prodesc->n_out_args + prodesc->n_mixed_args == 1)\n\t\t\t\t\t\t\tsnprintf(out_return_str, NAMEDATALEN + 32,\n\t\t\t\t\t\t\t\t\t \"return $args[%d];\", i);\n\t\t\t\t\t\telse\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\t/* PL/PHP deals with multiple OUT arguments by\n\t\t\t\t\t\t\t * internally creating an array of references to them.\n\t\t\t\t\t\t\t * E.g. 
out_fn(a out integer, b out integer )\n\t\t\t\t\t\t\t * translates into:\n\t\t\t\t\t\t\t * $_plphp_ret_out_fn_1234=array(a => $&a,b => $&b);\n\t\t\t\t\t\t\t */\n\t\t\t\t\t\t\tchar plphp_ret_array_name[NAMEDATALEN + 16];\n\n\t\t\t\t\t\t\tint array_namelen = snprintf(plphp_ret_array_name,\n\t\t\t\t\t\t\t \t\t\t\t\t\t \t NAMEDATALEN + 16,\n\t\t\t\t\t\t\t\t\t \t\t\t\t \t \"_plphp_ret_%s\",\n\t\t\t\t\t\t\t\t\t\t\t\t\t \t internal_proname);\n\n\t\t\t\t\t\t\tsnprintf(out_return_str, array_namelen + 16,\n\t\t\t\t\t\t\t\t\t\"return $%s;\", plphp_ret_array_name);\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t/* 2 NAMEDATALEN for argument names, additional\n\t\t\t\t\t\t\t * 16 bytes per each argument for assignment string,\n\t\t\t\t\t\t\t * additional 16 bytes for the 'array' prefix string.\n\t\t\t\t\t\t\t */\t\t\n\t\t\t\t\t\t\tout_aliases = palloc(array_namelen +\n\t\t\t\t\t\t\t\t\t\t\t\t (prodesc->n_out_args + \n\t\t\t\t\t\t\t\t\t\t\t\t prodesc->n_mixed_args) *\n\t\t\t\t\t\t\t\t\t\t\t\t (2*NAMEDATALEN + 16) + 16);\n\t\t\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\tout_str_end = snprintf(out_aliases,\n\t\t\t\t\t\t\t \t\t\t\t\t array_namelen +\n\t\t\t\t\t\t\t\t\t\t\t\t (2 * NAMEDATALEN + 16) + 16,\n\t\t\t\t\t\t\t\t\t\t\t\t \"$%s = array(&$args[%d]\", \n\t\t\t\t\t\t\t\t\t\t\t\t plphp_ret_array_name, i);\n\t\t\t\t\t\t\t\t\t\t\t\t \n\t\t\t\t\t\t}\n\t\t\t\t\t} \n\t\t\t\t\telse if (out_aliases)\n\t\t\t\t\t{\n\t\t\t\t\t /* Add new elements to the array of aliases for OUT args */\n\t\t\t\t\t\tAssert(prodesc->n_out_args + prodesc->n_mixed_args > 1);\n\t\t\t\t\t\tout_str_end += snprintf(out_aliases+out_str_end,\n\t\t\t\t\t\t\t\t\t\t\t\t2 * NAMEDATALEN + 16,\n\t\t\t\t\t\t\t\t\t\t\t\t\",&$args[%d]\", i);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tif (aliases)\n\t\t\t\tstrcat(aliases, \" \");\n\t\t\tif (out_aliases)\n\t\t\t\tstrcat(out_aliases, \")\");\n\t\t}\n\n\t\t/*\n\t\t * Create the text of the PHP function. 
We do not use the same\n\t\t * function name, because that would prevent function overloading.\n\t\t * Sadly this also prevents PL/php functions from calling each other\n\t\t * easily.\n\t\t */\n\t\tprosrcdatum = SysCacheGetAttr(PROCOID, procTup,\n\t\t\t\t\t\t\t\t\t Anum_pg_proc_prosrc, &isnull);\n\t\tif (isnull)\n\t\t\telog(ERROR, \"cache lookup yielded NULL prosrc\");\n\n\t\tproc_source = DatumGetCString(DirectFunctionCall1(textout,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t prosrcdatum));\n\n\t\t/* Create the procedure in the interpreter */\n\t\tcomplete_proc_source =\n\t\t\t(char *) palloc(strlen(proc_source) +\n\t\t\t\t\t\t\tstrlen(internal_proname) +\n\t\t\t\t\t\t\t(aliases ? strlen(aliases) : 0) + \n\t\t\t\t\t\t\t(out_aliases ? strlen(out_aliases) : 0) +\n\t\t\t\t\t\t\tstrlen(\"function ($args, $argc){ } \") + 32 +\n\t\t\t\t\t\t\t(out_return_str ? strlen(out_return_str) : 0));\n\n\t\t/* XXX Is this usage of sprintf safe? */\n\t\tif (is_trigger)\n\t\t\tsprintf(complete_proc_source, \"function %s($_TD){%s}\",\n\t\t\t\t\tinternal_proname, proc_source);\n\t\telse\n\t\t\tsprintf(complete_proc_source, \n\t\t\t\t\t\"function %s($args, $argc){%s %s;%s; %s}\",\n\t\t\t\t\tinternal_proname, \n\t\t\t\t\taliases ? aliases : \"\",\n\t\t\t\t\tout_aliases ? out_aliases : \"\",\n\t\t\t\t\tproc_source, \n\t\t\t\t\tout_return_str? 
out_return_str : \"\");\n\t\t\t\t\t\n\t\telog(LOG, \"complete_proc_source = %s\",\n\t\t\t\t \t complete_proc_source);\n\t\t\t\t\n\t\tzend_hash_del(CG(function_table), prodesc->proname,\n\t\t\t\t\t strlen(prodesc->proname) + 1);\n\n\t\tpointer = (char *) palloc(64);\n\t\tsprintf(pointer, \"%p\", (void *) prodesc);\n\t\tadd_assoc_string(plphp_proc_array, internal_proname,\n\t\t\t\t\t\t (char *) pointer, 1);\n\n\t\tif (zend_eval_string(complete_proc_source, NULL,\n\t\t\t\t\t\t\t \"plphp function source\" TSRMLS_CC) == FAILURE)\n\t\t{\n\t\t\t/* the next compilation will blow it up */\n\t\t\tprodesc->fn_xmin = InvalidTransactionId;\n\t\t\telog(ERROR, \"unable to compile function \\\"%s\\\"\",\n\t\t\t\t\t prodesc->proname);\n\t\t}\n\n\t\tif (aliases)\n\t\t\tpfree(aliases);\n\t\tif (out_aliases)\n\t\t\tpfree(out_aliases);\n\t\tif (out_return_str)\n\t\t\tpfree(out_return_str);\n\t\tpfree(complete_proc_source);\n\t}\n\n\tReleaseSysCache(procTup);\n\n\treturn prodesc;\n}\n\n/*\n * plphp_func_build_args\n * \t\tBuild a PHP array representing the arguments to the function\n */\nstatic zval *\nplphp_func_build_args(plphp_proc_desc *desc, FunctionCallInfo fcinfo TSRMLS_DC)\n{\n\tzval\t *retval;\n\tint\t\t\ti,j;\n\n\tMAKE_STD_ZVAL(retval);\n\tarray_init(retval);\n\n\t/* \n\t * The first var iterates over every argument, the second one - over the \n\t * IN or INOUT ones only\n\t */\n\tfor (i = 0, j = 0; i < desc->n_total_args; \n\t\t (j = IS_ARGMODE_OUT(desc->arg_argmode[i]) ? 
j : j + 1), i++)\n\t{\n\t\t/* Assing NULLs to OUT or TABLE arguments initially */\n\t\tif (IS_ARGMODE_OUT(desc->arg_argmode[i]))\n\t\t{\n\t\t\tadd_next_index_unset(retval);\n\t\t\tcontinue;\n\t\t}\n\n\t\tif (desc->arg_typtype[i] == TYPTYPE_PSEUDO)\n\t\t{\n\t\t\tHeapTuple\ttypeTup;\n\t\t\tForm_pg_type typeStruct;\n\n\t\t\ttypeTup = SearchSysCache(TYPEOID,\n\t\t\t\t\t\t\t\t\t ObjectIdGetDatum(get_fn_expr_argtype\n\t\t\t\t\t\t\t\t\t\t\t\t\t (fcinfo->flinfo, j)),\n\t\t\t\t\t\t\t\t\t 0, 0, 0);\n\t\t\ttypeStruct = (Form_pg_type) GETSTRUCT(typeTup);\n\t\t\tperm_fmgr_info(typeStruct->typoutput,\n\t\t\t\t\t\t &(desc->arg_out_func[i]));\n\t\t\tdesc->arg_typioparam[i] = typeStruct->typelem;\n\t\t\tReleaseSysCache(typeTup);\n\t\t}\n\n\t\tif (desc->arg_typtype[i] == TYPTYPE_COMPOSITE)\n\t\t{\n\t\t\tif (fcinfo->argnull[j])\n\t\t\t\tadd_next_index_unset(retval);\n\t\t\telse\n\t\t\t{\n\t\t\t\tHeapTupleHeader\ttd;\n\t\t\t\tOid\t\t\t\ttupType;\n\t\t\t\tint32\t\t\ttupTypmod;\n\t\t\t\tTupleDesc\t\ttupdesc;\n\t\t\t\tHeapTupleData\ttmptup;\n\t\t\t\tzval\t\t *hashref;\n\n\t\t\t\ttd = DatumGetHeapTupleHeader(fcinfo->arg[j]);\n\n\t\t\t\t/* Build a temporary HeapTuple control structure */\n\t\t\t\ttmptup.t_len = HeapTupleHeaderGetDatumLength(td);\n\t\t\t\ttmptup.t_data = DatumGetHeapTupleHeader(fcinfo->arg[j]);\n\n\t\t\t\t/* Extract rowtype info and find a tupdesc */\n\t\t\t\ttupType = HeapTupleHeaderGetTypeId(tmptup.t_data);\n\t\t\t\ttupTypmod = HeapTupleHeaderGetTypMod(tmptup.t_data);\n\t\t\t\ttupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);\n\n\t\t\t\t/* Build the PHP hash */\n\t\t\t\thashref = plphp_build_tuple_argument(&tmptup, tupdesc);\n\t\t\t\tzend_hash_next_index_insert(retval->value.ht,\n\t\t\t\t\t\t\t\t\t\t\t(void *) &hashref,\n\t\t\t\t\t\t\t\t\t\t\tsizeof(zval *), NULL);\n\t\t\t\t/* Finally release the acquired tupledesc */\n\t\t\t\tReleaseTupleDesc(tupdesc);\n\t\t\t}\n\t\t}\n\t\telse\n\t\t{\n\t\t\tif 
(fcinfo->argnull[j])\n\t\t\t\tadd_next_index_unset(retval);\n\t\t\telse\n\t\t\t{\n\t\t\t\tchar\t *tmp;\n\n\t\t\t\t/*\n\t\t\t\t * TODO room for improvement here: instead of going through the\n\t\t\t\t * output function, figure out if we can just use the native\n\t\t\t\t * representation to pass to PHP.\n\t\t\t\t */\n\n\t\t\t\ttmp =\n\t\t\t\t\tDatumGetCString(FunctionCall3\n\t\t\t\t\t\t\t\t\t(&(desc->arg_out_func[i]),\n\t\t\t\t\t\t\t\t\t fcinfo->arg[j],\n\t\t\t\t\t\t\t\t\t ObjectIdGetDatum(desc->arg_typioparam[i]),\n\t\t\t\t\t\t\t\t\t Int32GetDatum(-1)));\n\t\t\t\t/*\n\t\t\t\t * FIXME -- this is bogus. Not every value starting with { is\n\t\t\t\t * an array. Figure out a better method for detecting arrays.\n\t\t\t\t */\n\t\t\t\tif (tmp[0] == '{')\n\t\t\t\t{\n\t\t\t\t\tzval\t *hashref;\n\n\t\t\t\t\thashref = plphp_convert_from_pg_array(tmp TSRMLS_CC);\n\t\t\t\t\tzend_hash_next_index_insert(retval->value.ht,\n\t\t\t\t\t\t\t\t\t\t\t\t(void *) &hashref,\n\t\t\t\t\t\t\t\t\t\t\t\tsizeof(zval *), NULL);\n\t\t\t\t}\n\t\t\t\telse\n\t\t\t\t\tadd_next_index_string(retval, tmp, 1);\n\n\t\t\t\t/*\n\t\t\t\t * FIXME - figure out which parameters are passed by\n\t\t\t\t * reference and need freeing\n\t\t\t\t */\n\t\t\t\t/* pfree(tmp); */\n\t\t\t}\n\t\t}\n\t}\n\n\treturn retval;\n}\n\n/*\n * plphp_call_php_func\n * \t\tBuild the function argument array and call the PHP function.\n *\n * We use a private PHP symbol table, so that we can easily destroy everything\n * used during the execution of the function. We use it to collect the\n * arguments' zvals as well. 
We exclude the return value, because it will be\n * used by the caller -- it must be freed there!\n */\nstatic zval *\nplphp_call_php_func(plphp_proc_desc *desc, FunctionCallInfo fcinfo TSRMLS_DC)\n{\n\tzval\t *retval;\n\tzval\t *args;\n\tzval\t *argc;\n\tzval\t *funcname;\n\tzval\t **params[2];\n\tchar\t\tcall[64];\n\tHashTable *orig_symbol_table;\n\tHashTable *symbol_table;\n\n\tREPORT_PHP_MEMUSAGE(\"going to build function args\");\n\n\tALLOC_HASHTABLE(symbol_table);\n\tzend_hash_init(symbol_table, 0, NULL, ZVAL_PTR_DTOR, 0);\n\n\t/*\n\t * Build the function arguments. Save a pointer to each new zval in our\n\t * private symbol table, so that we can clean up easily later.\n\t */\n\targs = plphp_func_build_args(desc, fcinfo TSRMLS_CC);\n\tzend_hash_update(symbol_table, \"args\", strlen(\"args\") + 1,\n\t\t\t\t\t (void *) &args, sizeof(zval *), NULL);\n\n\tREPORT_PHP_MEMUSAGE(\"args built. Now the rest ...\");\n\n\tMAKE_STD_ZVAL(argc);\n\tZVAL_LONG(argc, desc->n_total_args);\n\tzend_hash_update(symbol_table, \"argc\", strlen(\"argc\") + 1,\n\t\t\t\t\t (void *) &argc, sizeof(zval *), NULL);\n\n\tparams[0] = &args;\n\tparams[1] = &argc;\n\n\t/* Build the internal function name, and save for later cleaning */\n\tsprintf(call, \"plphp_proc_%u\", fcinfo->flinfo->fn_oid);\n\tMAKE_STD_ZVAL(funcname);\n\tZVAL_STRING(funcname, call, 1);\n\tzend_hash_update(symbol_table, \"funcname\", strlen(\"funcname\") + 1,\n\t\t\t\t\t (void *) &funcname, sizeof(zval *), NULL);\n\n\tREPORT_PHP_MEMUSAGE(\"going to call the function\");\n\n\torig_symbol_table = EG(active_symbol_table);\n\tEG(active_symbol_table) = symbol_table;\n\n\tsaved_symbol_table = EG(active_symbol_table);\n\n\t/* XXX: why no_separation param is 1 is this call ? 
 */
	if (call_user_function_ex(CG(function_table), NULL, funcname, &retval,
							  2, params, 1, symbol_table TSRMLS_CC) == FAILURE)
		elog(ERROR, "could not call function \"%s\"", call);

	REPORT_PHP_MEMUSAGE("going to free some vars");

	saved_symbol_table = NULL;

	/* Return to the original symbol table, and clean our private one */
	EG(active_symbol_table) = orig_symbol_table;
	zend_hash_clean(symbol_table);

	REPORT_PHP_MEMUSAGE("function call done");

	return retval;
}

/*
 * plphp_call_php_trig
 * 		Build trigger argument array and call the PHP function as a
 * 		trigger.
 *
 * Note we don't need to change the symbol table here like we do in
 * plphp_call_php_func, because we do manual cleaning of each zval used.
 *
 * Returns the PHP function's return value zval; the caller owns it and
 * must free it (see plphp_trigger_handler).
 */
static zval *
plphp_call_php_trig(plphp_proc_desc *desc, FunctionCallInfo fcinfo,
					zval *trigdata TSRMLS_DC)
{
	zval	   *retval;
	zval	   *funcname;
	char		call[64];
	zval	  **params[1];

	params[0] = &trigdata;

	/* Build the internal function name, and save for later cleaning */
	sprintf(call, "plphp_proc_%u_trigger", fcinfo->flinfo->fn_oid);
	MAKE_STD_ZVAL(funcname);
	/*
	 * dup = 0: the zval points straight at the stack buffer "call", so it
	 * must NOT be released with zval_dtor -- hence the bare FREE_ZVAL below,
	 * which frees only the zval container and not the string.
	 */
	ZVAL_STRING(funcname, call, 0);

	/*
	 * HACK: mark trigdata as a reference, so it won't be copied in
	 * call_user_function_ex. This way the user function will be able to 
	 * modify it, in order to change NEW.
	 */
	Z_SET_ISREF_P(trigdata);

	if (call_user_function_ex(CG(function_table), NULL, funcname, &retval,
							  1, params, 1, NULL TSRMLS_CC) == FAILURE)
		elog(ERROR, "could not call function \"%s\"", call);

	FREE_ZVAL(funcname);

	/* Return to the original state */
	Z_UNSET_ISREF_P(trigdata);

	return retval;
}

/*
 * plphp_error_cb
 *
 * A callback for PHP error handling. This is called when the php_error or
 * zend_error function is invoked in our code. 
Ideally this function should\n * clean up the PHP state after an ERROR, but zend_try blocks do not seem\n * to work as I'd expect. So for now, we degrade the error to WARNING and \n * continue executing in the hope that the system doesn't crash later.\n *\n * Note that we do clean up some PHP state by hand but it doesn't seem to\n * work as expected either.\n */\nvoid\nplphp_error_cb(int type, const char *filename, const uint lineno,\n\t \t\t const char *fmt, va_list args)\n{\n\tchar\tstr[1024];\n\tint\t\televel;\n\n\tvsnprintf(str, 1024, fmt, args);\n\n\t/*\n\t * PHP error classification is a bitmask, so this conversion is a bit\n\t * bogus. However, most calls to php_error() use a single bit.\n\t * Whenever more than one is used, we will default to ERROR, so this is\n\t * safe, if a bit excessive.\n\t *\n\t * XXX -- I wonder whether we should promote the WARNINGs to errors as\n\t * well. PHP has a really stupid way of continuing execution in presence\n\t * of severe problems that I don't see why we should maintain.\n\t */\n\tswitch (type)\n\t{\n\t\tcase E_ERROR:\n\t\tcase E_CORE_ERROR:\n\t\tcase E_COMPILE_ERROR:\n\t\tcase E_USER_ERROR:\n\t\tcase E_PARSE:\n\t\t\televel = ERROR;\n\t\t\tbreak;\n\t\tcase E_WARNING:\n\t\tcase E_CORE_WARNING:\n\t\tcase E_COMPILE_WARNING:\n\t\tcase E_USER_WARNING:\n\t\tcase E_STRICT:\n\t\t\televel = WARNING;\n\t\t\tbreak;\n\t\tcase E_NOTICE:\n\t\tcase E_USER_NOTICE:\n\t\t\televel = NOTICE;\n\t\t\tbreak;\n\t\tdefault:\n\t\t\televel = ERROR;\n\t\t\tbreak;\n\t}\n\n\tREPORT_PHP_MEMUSAGE(\"reporting error\");\n\n\t/*\n\t * If this is a severe problem, we need to make PHP aware of it, so first\n\t * save the error message and then bail out of the PHP block. 
With luck,\n\t * this will be trapped by a zend_try/zend_catch block outwards in PL/php\n\t * code, which would translate it to a Postgres elog(ERROR), leaving\n\t * everything in a consistent state.\n\t *\n\t * For this to work, there must be a try/catch block covering every place\n\t * where PHP may raise an error!\n\t */\n\tif (elevel >= ERROR)\n\t{\n\t\tif (lineno != 0)\n\t\t{\n\t\t\tchar\tmsgline[1024];\n\t\t\tsnprintf(msgline, sizeof(msgline), \"%s at line %d\", str, lineno);\n\t\t\terror_msg = pstrdup(msgline);\n\t\t}\n\t\telse\n\t\t\terror_msg = pstrdup(str);\n\n\t\tzend_bailout();\n\t}\n\n\tereport(elevel,\n\t\t\t(errmsg(\"plphp: %s\", str)));\n}\n\n/* Check if the name can be a valid PHP variable name */\nstatic bool \nis_valid_php_identifier(char *name)\n{\n\tint \tlen,\n\t\t\ti;\n\t\n\tAssert(name);\n\n\tlen = strlen(name);\n\n\t/* Should start from the letter */\n\tif (!isalpha(name[0]))\n\t\treturn false;\n\tfor (i = 1; i < len; i++)\n\t{\n\t\t/* Only letters, digits and underscores are allowed */\n\t\tif (!isalpha(name[i]) && !isdigit(name[i]) && name[i] != '_')\n\t\t\treturn false;\n\t}\n\treturn true;\n}\n\n/*\n * vim:ts=4:sw=4:cino=(0\n */\n", "plphp_io.c": "/**********************************************************************\n * plphp_io.c\n *\n * Support functions for PL/php -- mainly functions to convert stuff\n * from the PHP representation to PostgreSQL representation and vice\n * versa, either text or binary representations.\n *\n * $Id$\n *\n **********************************************************************/\n\n#include \"postgres.h\"\n#include \"plphp_io.h\"\n\n#include \"catalog/pg_type.h\"\n#include \"executor/spi.h\"\n#include \"funcapi.h\"\n#include \"lib/stringinfo.h\"\n#include \"utils/lsyscache.h\"\n#include \"utils/rel.h\"\n#include \"utils/syscache.h\"\n#include \"utils/memutils.h\"\n#include \"access/htup_details.h\"\n\n/*\n * plphp_zval_from_tuple\n *\t\t Build a PHP hash from a tuple.\n */\nzval 
*\nplphp_zval_from_tuple(HeapTuple tuple, TupleDesc tupdesc)\n{\n\tint\t\t\ti;\n\tchar\t *attname = NULL;\n\tzval\t *array;\n\n\tMAKE_STD_ZVAL(array);\n\tarray_init(array);\n\n\tfor (i = 0; i < tupdesc->natts; i++)\n\t{\n\t\tchar *attdata;\n\n\t\t/* Get the attribute name */\n\t\tattname = tupdesc->attrs[i]->attname.data;\n\n\t\t/* and get its value */\n\t\tif ((attdata = SPI_getvalue(tuple, tupdesc, i + 1)) != NULL)\n\t\t{\n\t\t\t/* \"true\" means strdup the string */\n\t\t\tadd_assoc_string(array, attname, attdata, true);\n\t\t\tpfree(attdata);\n\t\t}\n\t\telse\n\t\t\tadd_assoc_null(array, attname);\n\t}\n\treturn array;\n}\n\n/*\n * plphp_htup_from_zval\n * \t\tBuild a HeapTuple from a zval (which must be an array) and a TupleDesc.\n *\n * The return HeapTuple is allocated in the current memory context and must\n * be freed by the caller.\n *\n * If zval doesn't contain any of the element names from the TupleDesc,\n * build a tuple from the first N elements. This allows us to accept\n * arrays in form array(1,2,3) as the result of functions with OUT arguments.\n * XXX -- possible optimization: keep the memory context created and only\n * reset it between calls.\n */\nHeapTuple\nplphp_htup_from_zval(zval *val, TupleDesc tupdesc)\n{\n\tMemoryContext\toldcxt;\n\tMemoryContext\ttmpcxt;\n\tHeapTuple\t\tret;\n\tAttInMetadata *attinmeta;\n\tHashPosition\tpos;\n\tzval\t\t **element;\n\tchar\t\t **values;\n\tint\t\t\t\ti;\n\tbool\t\t\tallempty = true;\n\n\ttmpcxt = AllocSetContextCreate(TopTransactionContext,\n\t\t\t\t\t\t\t\t \"htup_from_zval cxt\",\n\t\t\t\t\t\t\t\t ALLOCSET_DEFAULT_MINSIZE,\n\t\t\t\t\t\t\t\t ALLOCSET_DEFAULT_INITSIZE,\n\t\t\t\t\t\t\t\t ALLOCSET_DEFAULT_MAXSIZE);\n\toldcxt = MemoryContextSwitchTo(tmpcxt);\n\n\tvalues = (char **) palloc(tupdesc->natts * sizeof(char *));\n\n\tfor (i = 0; i < tupdesc->natts; i++)\n\t{\n\t\tchar *key = SPI_fname(tupdesc, i + 1);\n\t\tzval *scalarval = plphp_array_get_elem(val, key);\n\n\t\tvalues[i] = 
plphp_zval_get_cstring(scalarval, true, true);\n\t\t/* \n\t\t * Reset the flag if even one of the keys actually exists,\n\t\t * even if it is NULL.\n\t\t */\n\t\tif (scalarval != NULL)\n\t\t\tallempty = false;\n\t}\n\t/* None of the names from the tuple exists,\n\t * try to get 1st N array elements and assign them to the tuple\n\t */\n\tif (allempty)\n\t\tfor (i = 0, \n\t\t\t zend_hash_internal_pointer_reset_ex(Z_ARRVAL_P(val), &pos);\n\t\t\t (zend_hash_get_current_data_ex(Z_ARRVAL_P(val), \n\t\t\t\t\t\t\t\t\t\t (void **) &element,\n\t\t\t\t\t\t\t\t\t\t\t&pos) == SUCCESS) && \n\t\t\t(i < tupdesc->natts);\n\t\t\tzend_hash_move_forward_ex(Z_ARRVAL_P(val), &pos), i++)\n\t\t\tvalues[i] = plphp_zval_get_cstring(element[0], true, true);\n\n\tattinmeta = TupleDescGetAttInMetadata(tupdesc);\n\n\tMemoryContextSwitchTo(oldcxt);\n\tret = BuildTupleFromCStrings(attinmeta, values);\n\n\tMemoryContextDelete(tmpcxt);\n\n\treturn ret;\n}\n\n\n/* plphp_srf_htup_from_zval\n * \t\tBuild a tuple from a zval and a TupleDesc, for a SRF.\n *\n * Like above, but we don't use the names of the array attributes;\n * rather we build the tuple in order. Also, we get a MemoryContext\n * from the caller and just clean it at return, rather than building it each\n * time.\n */\nHeapTuple\nplphp_srf_htup_from_zval(zval *val, AttInMetadata *attinmeta,\n\t\t\t\t\t\t MemoryContext cxt)\n{\n\tMemoryContext\toldcxt;\n\tHeapTuple\t\tret;\n\tHashPosition\tpos;\n\tchar\t\t **values;\n\tzval\t\t **element;\n\tint\t\t\t\ti = 0;\n\n\toldcxt = MemoryContextSwitchTo(cxt);\n\n\t/*\n\t * Use palloc0 to initialize values to NULL, just in case the user does\n\t * not pass all needed attributes\n\t */\n\tvalues = (char **) palloc0(attinmeta->tupdesc->natts * sizeof(char *));\n\n\t/*\n\t * If the input zval is an array, build a tuple using each element as an\n\t * attribute. 
Exception: if the return tuple has a single element and\n\t * it's an array type, use the whole array as a single value.\n\t *\n\t * If the input zval is a scalar, use it as an element directly.\n\t */\n\tif (Z_TYPE_P(val) == IS_ARRAY)\n\t{\n\t\tif (attinmeta->tupdesc->natts == 1)\n\t\t{\n\t\t\t/* Is it an array? */\n\t\t\tif (attinmeta->tupdesc->attrs[0]->attndims != 0 ||\n\t\t\t\t!OidIsValid(get_element_type(attinmeta->tupdesc->attrs[0]->atttypid)))\n\t\t\t{\n\t\t\t\tzend_hash_internal_pointer_reset_ex(Z_ARRVAL_P(val), &pos);\n\t\t\t\tzend_hash_get_current_data_ex(Z_ARRVAL_P(val),\n\t\t\t\t\t\t\t\t\t\t\t (void **) &element,\n\t\t\t\t\t\t\t\t\t\t\t &pos);\n\t\t\t\tvalues[0] = plphp_zval_get_cstring(element[0], true, true);\n\t\t\t}\n\t\t\telse\n\t\t\t\tvalues[0] = plphp_zval_get_cstring(val, true, true);\n\t\t}\n\t\telse\n\t\t{\n\t\t\t/*\n\t\t\t * Ok, it's an array and the return tuple has more than one\n\t\t\t * attribute, so scan each array element.\n\t\t\t */\n\t\t\tfor (zend_hash_internal_pointer_reset_ex(Z_ARRVAL_P(val), &pos);\n\t\t\t\t zend_hash_get_current_data_ex(Z_ARRVAL_P(val),\n\t\t\t\t\t\t\t\t\t\t\t (void **) &element,\n\t\t\t\t\t\t\t\t\t\t\t &pos) == SUCCESS;\n\t\t\t\t zend_hash_move_forward_ex(Z_ARRVAL_P(val), &pos))\n\t\t\t{\n\t\t\t\t/* avoid overrunning the palloc'ed chunk */\n\t\t\t\tif (i >= attinmeta->tupdesc->natts)\n\t\t\t\t{\n\t\t\t\t\telog(WARNING, \"more elements in array than attributes in return type\");\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\n\t\t\t\tvalues[i++] = plphp_zval_get_cstring(element[0], true, true);\n\t\t\t}\n\t\t}\n\t}\n\telse\n\t{\n\t\t/* The passed zval is not an array -- use as the only attribute */\n\t\tif (attinmeta->tupdesc->natts != 1)\n\t\t\tereport(ERROR,\n\t\t\t\t\t(errmsg(\"returned array does not correspond to \"\n\t\t\t\t\t\t\t\"declared return value\")));\n\n\t\tvalues[0] = plphp_zval_get_cstring(val, true, true);\n\t}\n\n\tMemoryContextSwitchTo(oldcxt);\n\n\tret = BuildTupleFromCStrings(attinmeta, 
values);\n\n\tMemoryContextReset(cxt);\n\n\treturn ret;\n}\n\n/*\n * plphp_convert_to_pg_array\n * \t\tConvert a zval into a Postgres text array representation.\n *\n * The return value is palloc'ed in the current memory context and\n * must be freed by the caller.\n */\nchar *\nplphp_convert_to_pg_array(zval *array)\n{\n\tint\t\t\tarr_size;\n\tzval\t **element;\n\tint\t\t\ti = 0;\n\tHashPosition \tpos;\n\tStringInfoData\tstr;\n\t\n\tinitStringInfo(&str);\n\n\tarr_size = zend_hash_num_elements(Z_ARRVAL_P(array));\n\n\tappendStringInfoChar(&str, '{');\n\tif (Z_TYPE_P(array) == IS_ARRAY)\n\t{\n\t\tfor (zend_hash_internal_pointer_reset_ex(Z_ARRVAL_P(array), &pos);\n\t\t\t zend_hash_get_current_data_ex(Z_ARRVAL_P(array),\n\t\t\t\t\t\t\t\t\t\t (void **) &element,\n\t\t\t\t\t\t\t\t\t\t &pos) == SUCCESS;\n\t\t\t zend_hash_move_forward_ex(Z_ARRVAL_P(array), &pos))\n\t\t{\n\t\t\tchar *tmp;\n\n\t\t\tswitch (Z_TYPE_P(element[0]))\n\t\t\t{\n\t\t\t\tcase IS_LONG:\n\t\t\t\t\tappendStringInfo(&str, \"%li\", element[0]->value.lval);\n\t\t\t\t\tbreak;\n\t\t\t\tcase IS_DOUBLE:\n\t\t\t\t\tappendStringInfo(&str, \"%f\", element[0]->value.dval);\n\t\t\t\t\tbreak;\n\t\t\t\tcase IS_STRING:\n\t\t\t\t\tappendStringInfo(&str, \"\\\"%s\\\"\", element[0]->value.str.val);\n\t\t\t\t\tbreak;\n\t\t\t\tcase IS_ARRAY:\n\t\t\t\t\ttmp = plphp_convert_to_pg_array(element[0]);\n\t\t\t\t\tappendStringInfo(&str, \"%s\", tmp);\n\t\t\t\t\tpfree(tmp);\n\t\t\t\t\tbreak;\n\t\t\t\tdefault:\n\t\t\t\t\telog(ERROR, \"unrecognized element type %d\",\n\t\t\t\t\t\t Z_TYPE_P(element[0]));\n\t\t\t}\n\n\t\t\tif (i != arr_size - 1)\n\t\t\t\tappendStringInfoChar(&str, ',');\n\t\t\ti++;\n\t\t}\n\t}\n\n\tappendStringInfoChar(&str, '}');\n\n\treturn str.data;\n}\n\n/*\n * plphp_convert_from_pg_array\n * \t\tConvert a Postgres text array representation to a PHP array\n * \t\t(zval type thing).\n *\n * FIXME -- does not work if there are embedded {'s in the input value.\n *\n * FIXME -- does not correctly quote/dequote the 
values\n */\nzval *\nplphp_convert_from_pg_array(char *input TSRMLS_DC)\n{\n\tzval\t *retval = NULL;\n\tint\t\t\ti;\n\tStringInfoData str;\n\t\n\tinitStringInfo(&str);\n\n\tMAKE_STD_ZVAL(retval);\n\tarray_init(retval);\n\n\tfor (i = 0; i < strlen(input); i++)\n\t{\n\t\tif (input[i] == '{')\n\t\t\tappendStringInfoString(&str, \"array(\");\n\t\telse if (input[i] == '}')\n\t\t\tappendStringInfoChar(&str, ')');\n\t\telse\n\t\t\tappendStringInfoChar(&str, input[i]);\n\t}\n\tappendStringInfoChar(&str, ';');\n\n\tif (zend_eval_string(str.data, retval,\n\t\t\t\t\t\t \"plphp array input parameter\" TSRMLS_CC) == FAILURE)\n\t\telog(ERROR, \"plphp: convert to internal representation failure\");\n\n\tpfree(str.data);\n\n\treturn retval;\n}\n\n/*\n * plphp_array_get_elem\n * \t\tReturn a pointer to the array element with the given key\n */\nzval *\nplphp_array_get_elem(zval *array, char *key)\n{\n\tzval\t **element;\n\n\tif (!array)\n\t\telog(ERROR, \"passed zval is not a valid pointer\");\n\tif (Z_TYPE_P(array) != IS_ARRAY)\n\t\telog(ERROR, \"passed zval is not an array\");\n\n\tif (zend_symtable_find(array->value.ht,\n\t\t\t\t\t \t key,\n\t\t\t\t\t strlen(key) + 1,\n\t\t\t\t\t (void **) &element) != SUCCESS)\n\t\treturn NULL;\n\n\treturn element[0];\n}\n\n/*\n * zval_get_cstring\n *\t\tGet a C-string representation of a zval.\n *\n * All return values, except those that are NULL, are palloc'ed in the current\n * memory context and must be freed by the caller.\n *\n * If the do_array parameter is false, then array values will not be converted\n * and an error will be raised instead.\n *\n * If the null_ok parameter is true, we will return NULL for a NULL zval.\n * Otherwise we raise an error.\n */\nchar *\nplphp_zval_get_cstring(zval *val, bool do_array, bool null_ok)\n{\n\tchar *ret;\n\n\tif (!val)\n\t{\n\t\tif (null_ok)\n\t\t\treturn NULL;\n\t\telse\n\t\t\telog(ERROR, \"invalid zval pointer\");\n\t}\n\n\tswitch (Z_TYPE_P(val))\n\t{\n\t\tcase IS_NULL:\n\t\t\treturn 
NULL;\n\t\tcase IS_LONG:\n\t\t\tret = palloc(64);\n\t\t\tsnprintf(ret, 64, \"%ld\", Z_LVAL_P(val));\n\t\t\tbreak;\n\t\tcase IS_DOUBLE:\n\t\t\tret = palloc(64);\n\t\t\tsnprintf(ret, 64, \"%f\", Z_DVAL_P(val));\n\t\t\tbreak;\n\t\tcase IS_BOOL:\n\t\t\tret = palloc(8);\n\t\t\tsnprintf(ret, 8, \"%s\", Z_BVAL_P(val) ? \"true\": \"false\");\n\t\t\tbreak;\n\t\tcase IS_STRING:\n\t\t\tret = palloc(Z_STRLEN_P(val) + 1);\n\t\t\tsnprintf(ret, Z_STRLEN_P(val) + 1, \"%s\", \n\t\t\t\t\t Z_STRVAL_P(val));\n\t\t\tbreak;\n\t\tcase IS_ARRAY:\n\t\t\tif (!do_array)\n\t\t\t\telog(ERROR, \"can't stringize array value\");\n\t\t\tret = plphp_convert_to_pg_array(val);\n\t\t\tbreak;\n\t\tdefault:\n\t\t\t/* keep compiler quiet */\n\t\t\tret = NULL;\n\t\t\telog(ERROR, \"can't stringize value of type %d\", val->type);\n\t}\n\n\treturn ret;\n}\n\n/*\n * plphp_build_tuple_argument\n *\n * Build a PHP array from all attributes of a given tuple\n */\nzval *\nplphp_build_tuple_argument(HeapTuple tuple, TupleDesc tupdesc)\n{\n\tint\t\t\ti;\n\tzval\t *output;\n\tDatum\t\tattr;\n\tbool\t\tisnull;\n\tchar\t *attname;\n\tchar\t *outputstr;\n\tHeapTuple\ttypeTup;\n\tOid\t\t\ttypoutput;\n\tOid\t\t\ttypioparam;\n\n\tMAKE_STD_ZVAL(output);\n\tarray_init(output);\n\n\tfor (i = 0; i < tupdesc->natts; i++)\n\t{\n\t\t/* Ignore dropped attributes */\n\t\tif (tupdesc->attrs[i]->attisdropped)\n\t\t\tcontinue;\n\n\t\t/* Get the attribute name */\n\t\tattname = tupdesc->attrs[i]->attname.data;\n\n\t\t/* Get the attribute value */\n\t\tattr = heap_getattr(tuple, i + 1, tupdesc, &isnull);\n\n\t\t/* If it is null, set it to undef in the hash. 
*/\n\t\tif (isnull)\n\t\t{\n\t\t\tadd_next_index_unset(output);\n\t\t\tcontinue;\n\t\t}\n\n\t\t/*\n\t\t * Lookup the attribute type in the syscache for the output function\n\t\t */\n\t\ttypeTup = SearchSysCache(TYPEOID,\n\t\t\t\t\t\t\t\t ObjectIdGetDatum(tupdesc->attrs[i]->atttypid),\n\t\t\t\t\t\t\t\t 0, 0, 0);\n\t\tif (!HeapTupleIsValid(typeTup))\n\t\t{\n\t\t\telog(ERROR, \"cache lookup failed for type %u\",\n\t\t\t\t tupdesc->attrs[i]->atttypid);\n\t\t}\n\n\t\ttypoutput = ((Form_pg_type) GETSTRUCT(typeTup))->typoutput;\n\t\ttypioparam = getTypeIOParam(typeTup);\n\t\tReleaseSysCache(typeTup);\n\n\t\t/* Append the attribute name and the value to the list. */\n\t\toutputstr =\n\t\t\tDatumGetCString(OidFunctionCall3(typoutput, attr,\n\t\t\t\t\t\t\t\t\t\t\t ObjectIdGetDatum(typioparam),\n\t\t\t\t\t\t\t\t\t\t\t Int32GetDatum(tupdesc->attrs[i]->atttypmod)));\n\t\tadd_assoc_string(output, attname, outputstr, 1);\n\t\tpfree(outputstr);\n\t}\n\n\treturn output;\n}\n\n/*\n * plphp_modify_tuple\n * \t\tReturn the modified NEW tuple, for use as return value in a BEFORE\n * \t\ttrigger. outdata must point to the $_TD variable from the PHP\n * \t\tfunction.\n *\n * The tuple will be allocated in the current memory context and must be freed\n * by the caller.\n *\n * XXX Possible optimization: make this a global context that is not deleted,\n * but only reset each time this function is called. 
(Think about triggers\n * calling other triggers though).\n */\nHeapTuple\nplphp_modify_tuple(zval *outdata, TriggerData *tdata)\n{\n\tTupleDesc\ttupdesc;\n\tHeapTuple\trettuple;\n\tzval\t *newtup;\n\tzval\t **element;\n\tchar\t **vals;\n\tint\t\t\ti;\n\tAttInMetadata *attinmeta;\n\tMemoryContext tmpcxt,\n\t\t\t\t oldcxt;\n\n\ttmpcxt = AllocSetContextCreate(CurTransactionContext,\n\t\t\t\t\t\t\t\t \"PL/php NEW context\",\n\t\t\t\t\t\t\t\t ALLOCSET_DEFAULT_MINSIZE,\n\t\t\t\t\t\t\t\t ALLOCSET_DEFAULT_INITSIZE,\n\t\t\t\t\t\t\t\t ALLOCSET_DEFAULT_MAXSIZE);\n\n\toldcxt = MemoryContextSwitchTo(tmpcxt);\n\n\t/* Fetch \"new\" from $_TD */\n\tif (zend_hash_find(outdata->value.ht,\n\t\t\t\t\t \"new\", strlen(\"new\") + 1,\n\t\t\t\t\t (void **) &element) != SUCCESS)\n\t\telog(ERROR, \"$_TD['new'] not found\");\n\n\tif (Z_TYPE_P(element[0]) != IS_ARRAY)\n\t\telog(ERROR, \"$_TD['new'] must be an array\");\n\tnewtup = element[0];\n\n\t/* Fetch the tupledesc and metadata */\n\ttupdesc = tdata->tg_relation->rd_att;\n\tattinmeta = TupleDescGetAttInMetadata(tupdesc);\n\n\ti = zend_hash_num_elements(Z_ARRVAL_P(newtup));\n\n\tif (tupdesc->natts > i)\n\t\tereport(ERROR,\n\t\t\t\t(errmsg(\"insufficient number of keys in $_TD['new']\"),\n\t\t\t\t errdetail(\"At least %d expected, %d found.\",\n\t\t\t\t\t\t tupdesc->natts, i)));\n\n\tvals = (char **) palloc(tupdesc->natts * sizeof(char *));\n\n\t/*\n\t * For each attribute in the tupledesc, get its value from newtup and put\n\t * it in an array of cstrings.\n\t */\n\tfor (i = 0; i < tupdesc->natts; i++)\n\t{\n\t\tzval **element;\n\t\tchar *attname = NameStr(tupdesc->attrs[i]->attname);\n\n\t\t/* Fetch the attribute value from the zval */\n\t\tif (zend_symtable_find(newtup->value.ht, attname, strlen(attname) + 1,\n\t\t\t\t\t\t \t (void **) &element) != SUCCESS)\n\t\t\telog(ERROR, \"$_TD['new'] does not contain attribute \\\"%s\\\"\",\n\t\t\t\t attname);\n\n\t\tvals[i] = plphp_zval_get_cstring(element[0], true, true);\n\t}\n\n\t/* Return to 
the original context so that the new tuple will survive */\n\tMemoryContextSwitchTo(oldcxt);\n\n\t/* Build the tuple */\n\trettuple = BuildTupleFromCStrings(attinmeta, vals);\n\n\t/* Free the memory used */\n\tMemoryContextDelete(tmpcxt);\n\n\treturn rettuple;\n}\n\n/*\n * vim:ts=4:sw=4:cino=(0\n */\n", "plphp_spi.c": "/**********************************************************************\n * plphp_spi.c - SPI-related functions for PL/php.\n *\n * This software is copyright (c) Command Prompt Inc.\n *\n * The author hereby grants permission to use, copy, modify,\n * distribute, and license this software and its documentation for any\n * purpose, provided that existing copyright notices are retained in\n * all copies and that this notice is included verbatim in any\n * distributions. No written agreement, license, or royalty fee is\n * required for any of the authorized uses. Modifications to this\n * software may be copyrighted by their author and need not follow the\n * licensing terms described here, provided that the new terms are\n * clearly indicated on the first page of each file where they apply.\n *\n * IN NO EVENT SHALL THE AUTHOR OR DISTRIBUTORS BE LIABLE TO ANY PARTY\n * FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES\n * ARISING OUT OF THE USE OF THIS SOFTWARE, ITS DOCUMENTATION, OR ANY\n * DERIVATIVES THEREOF, EVEN IF THE AUTHOR HAVE BEEN ADVISED OF THE\n * POSSIBILITY OF SUCH DAMAGE.\n *\n * THE AUTHOR AND DISTRIBUTORS SPECIFICALLY DISCLAIM ANY WARRANTIES,\n * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND\n * NON-INFRINGEMENT. 
THIS SOFTWARE IS PROVIDED ON AN \"AS IS\" BASIS,\n * AND THE AUTHOR AND DISTRIBUTORS HAVE NO OBLIGATION TO PROVIDE\n * MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.\n *\n * IDENTIFICATION\n *\t\t$Id$\n *********************************************************************\n */\n\n#include \"postgres.h\"\n#include \"plphp_spi.h\"\n#include \"plphp_io.h\"\n\n/* PHP stuff */\n#include \"php.h\"\n\n/* PostgreSQL stuff */\n#include \"access/xact.h\"\n#include \"access/htup_details.h\"\n#include \"miscadmin.h\"\n\n#undef DEBUG_PLPHP_MEMORY\n\n#ifdef DEBUG_PLPHP_MEMORY\n#define REPORT_PHP_MEMUSAGE(where) \\\n\telog(NOTICE, \"PHP mem usage: \u00ab%s\u00bb: %u\", where, AG(allocated_memory));\n#else\n#define REPORT_PHP_MEMUSAGE(a) \n#endif\n\n/* resource type Id for SPIresult */\nint SPIres_rtype;\n\n/* SPI function table */\nzend_function_entry spi_functions[] =\n{\n\tZEND_FE(spi_exec, NULL)\n\tZEND_FE(spi_fetch_row, NULL)\n\tZEND_FE(spi_processed, NULL)\n\tZEND_FE(spi_status, NULL)\n\tZEND_FE(spi_rewind, NULL)\n\tZEND_FE(pg_raise, NULL)\n\tZEND_FE(return_next, NULL)\n\t{NULL, NULL, NULL}\n};\n\n/* SRF support: */\nFunctionCallInfo current_fcinfo = NULL;\nTupleDesc current_tupledesc = NULL;\nAttInMetadata *current_attinmeta = NULL;\nMemoryContext current_memcxt = NULL;\nTuplestorestate *current_tuplestore = NULL;\n\n\n/* A symbol table to save for return_next for the RETURNS TABLE case */\nHashTable *saved_symbol_table;\n\nstatic zval *get_table_arguments(AttInMetadata *attinmeta);\n\n/*\n * spi_exec\n * \t\tPL/php equivalent to SPI_exec().\n *\n * This function creates and return a PHP resource which describes the result\n * of a user-specified query. If the query returns tuples, it's possible to\n * retrieve them by using spi_fetch_row.\n *\n * Receives one or two arguments. The mandatory first argument is the query\n * text. 
The optional second argument is the tuple limit.\n *\n * Note that just like PL/Perl, we start a subtransaction before invoking the\n * SPI call, and automatically roll it back if the call fails.\n */\nZEND_FUNCTION(spi_exec)\n{\n\tchar\t *query;\n\tint\t\t\tquery_len;\n\tlong\t\tstatus;\n\tlong\t\tlimit;\n\tphp_SPIresult *SPIres;\n\tint\t\t\tspi_id;\n\tMemoryContext oldcontext = CurrentMemoryContext;\n\tResourceOwner oldowner = CurrentResourceOwner;\n\n\tREPORT_PHP_MEMUSAGE(\"spi_exec called\");\n\n\tif ((ZEND_NUM_ARGS() > 2) || (ZEND_NUM_ARGS() < 1))\n\t\tWRONG_PARAM_COUNT;\n\n\t/* Parse arguments */\n\tif (ZEND_NUM_ARGS() == 2)\n\t{\n\t\tif (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, \"sl\",\n\t\t\t\t\t\t\t\t &query, &query_len, &limit) == FAILURE)\n\t\t{\n\t\t\tzend_error(E_WARNING, \"Can not parse parameters in %s\",\n\t\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\t\tRETURN_FALSE;\n\t\t}\n\t}\n\telse if (ZEND_NUM_ARGS() == 1)\n\t{\n\t\tif (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, \"s\",\n\t\t\t\t\t\t\t\t &query, &query_len) == FAILURE)\n\t\t{\n\t\t\tzend_error(E_WARNING, \"Can not parse parameters in %s\",\n\t\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\t\tRETURN_FALSE;\n\t\t}\n\t\tlimit = 0;\n\t}\n\telse\n\t{\n\t\tzend_error(E_WARNING, \"Incorrect number of parameters to %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\tRETURN_FALSE;\n\t}\n\n\tBeginInternalSubTransaction(NULL);\n\tMemoryContextSwitchTo(oldcontext);\n\n\t/* Call SPI */\n\tPG_TRY();\n\t{\n\t\tstatus = SPI_exec(query, limit);\n\n\t\tReleaseCurrentSubTransaction();\n\t\tMemoryContextSwitchTo(oldcontext);\n\t\tCurrentResourceOwner = oldowner;\n\n\t\t/*\n\t\t * AtEOSubXact_SPI() should not have popped any SPI context, but just\n\t\t * in case it did, make sure we remain connected.\n\t\t */\n\t\tSPI_restore_connection();\n\t}\n\tPG_CATCH();\n\t{\n\t\tErrorData\t*edata;\n\n\t\t/* Save error info */\n\t\tMemoryContextSwitchTo(oldcontext);\n\t\tedata = 
CopyErrorData();\n\t\tFlushErrorState();\n\n\t\t/* Abort the inner transaction */\n\t\tRollbackAndReleaseCurrentSubTransaction();\n\t\tMemoryContextSwitchTo(oldcontext);\n\t\tCurrentResourceOwner = oldowner;\n\n\t\t/*\n\t\t * If AtEOSubXact_SPI() popped any SPI context of the subxact, it will\n\t\t * have left us in a disconnected state. We need this hack to return\n\t\t * to connected state.\n\t\t */\n\t\tSPI_restore_connection();\n\n\t\t/* bail PHP out */\n\t\tzend_error(E_ERROR, \"%s\", strdup(edata->message));\n\n\t\t/* Can't get here, but keep compiler quiet */\n\t\treturn;\n\t}\n\tPG_END_TRY();\n\n\t/* This malloc'ed chunk is freed in php_SPIresult_destroy */\n\tSPIres = (php_SPIresult *) malloc(sizeof(php_SPIresult));\n\tif (!SPIres)\n\t\tereport(ERROR,\n\t\t\t\t(errcode(ERRCODE_OUT_OF_MEMORY),\n\t\t\t\t errmsg(\"out of memory\")));\n\n\t/* Prepare the return resource */\n\tSPIres->SPI_processed = SPI_processed;\n\tif (status == SPI_OK_SELECT)\n\t\tSPIres->SPI_tuptable = SPI_tuptable;\n\telse\n\t\tSPIres->SPI_tuptable = NULL;\n\tSPIres->current_row = 0;\n\tSPIres->status = status;\n\n\tREPORT_PHP_MEMUSAGE(\"spi_exec: creating resource\");\n\n\t/* Register the resource to PHP so it will be able to free it */\n\tspi_id = ZEND_REGISTER_RESOURCE(return_value, (void *) SPIres,\n\t\t\t\t\t \t\t\t\tSPIres_rtype);\n\n\tREPORT_PHP_MEMUSAGE(\"spi_exec: returning\");\n\n\tRETURN_RESOURCE(spi_id);\n}\n\n/*\n * spi_fetch_row\n * \t\tGrab a row from a SPI result (from spi_exec).\n *\n * This function receives a resource Id and returns a PHP hash representing the\n * next tuple in the result, or false if no tuples remain.\n *\n * XXX Apparently this is leaking memory. 
How do we tell PHP to free the tuple\n * once the user is done with it?\n */\nZEND_FUNCTION(spi_fetch_row)\n{\n\tzval\t *row = NULL;\n\tzval\t **z_spi = NULL;\n\tphp_SPIresult\t*SPIres;\n\n\tREPORT_PHP_MEMUSAGE(\"spi_fetch_row: called\");\n\n\tif (ZEND_NUM_ARGS() != 1)\n\t\tWRONG_PARAM_COUNT;\n\n\tif (zend_get_parameters_ex(1, &z_spi) == FAILURE)\n\t{\n\t\tzend_error(E_WARNING, \"Can not parse parameters in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\tRETURN_FALSE;\n\t}\n\tif (z_spi == NULL)\n\t{\n\t\tzend_error(E_WARNING, \"Could not get SPI resource in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\tRETURN_FALSE;\n\t}\n\n\tZEND_FETCH_RESOURCE(SPIres, php_SPIresult *, z_spi, -1, \"SPI result\",\n\t\t\t\t\t\tSPIres_rtype);\n\n\tif (SPIres->status != SPI_OK_SELECT)\n\t{\n\t\tzend_error(E_WARNING, \"SPI status is not good\");\n\t\tRETURN_FALSE;\n\t}\n\n\tif (SPIres->current_row < SPIres->SPI_processed)\n\t{\n\t\trow = plphp_zval_from_tuple(SPIres->SPI_tuptable->vals[SPIres->current_row],\n\t\t\t \t\t\t\t\t\tSPIres->SPI_tuptable->tupdesc);\n\t\tSPIres->current_row++;\n\n\t\t*return_value = *row;\n\n\t\tzval_copy_ctor(return_value);\n\t\tzval_dtor(row);\n\t\tFREE_ZVAL(row);\n\n\t}\n\telse\n\t\tRETURN_FALSE;\n\n\tREPORT_PHP_MEMUSAGE(\"spi_fetch_row: finish\");\n}\n\n/*\n * spi_processed\n * \t\tReturn the number of tuples returned in a spi_exec call.\n */\nZEND_FUNCTION(spi_processed)\n{\n\tzval\t **z_spi = NULL;\n\tphp_SPIresult\t*SPIres;\n\n\tREPORT_PHP_MEMUSAGE(\"spi_processed: start\");\n\n\tif (ZEND_NUM_ARGS() != 1)\n\t\tWRONG_PARAM_COUNT;\n\n\tif (zend_get_parameters_ex(1, &z_spi) == FAILURE)\n\t{\n\t\tzend_error(E_WARNING, \"Cannot parse parameters in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\tRETURN_FALSE;\n\t}\n\tif (z_spi == NULL)\n\t{\n\t\tzend_error(E_WARNING, \"Could not get SPI resource in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\tRETURN_FALSE;\n\t}\n\n\tZEND_FETCH_RESOURCE(SPIres, php_SPIresult *, 
z_spi, -1, \"SPI result\",\n\t\t\t\t\t\tSPIres_rtype);\n\n\tREPORT_PHP_MEMUSAGE(\"spi_processed: finish\");\n\n\tRETURN_LONG(SPIres->SPI_processed);\n}\n\n/*\n * spi_status\n * \t\tReturn the status returned by a previous spi_exec call, as a string.\n */\nZEND_FUNCTION(spi_status)\n{\n\tzval\t **z_spi = NULL;\n\tphp_SPIresult\t*SPIres;\n\n\tREPORT_PHP_MEMUSAGE(\"spi_status: start\");\n\n\tif (ZEND_NUM_ARGS() != 1)\n\t\tWRONG_PARAM_COUNT;\n\n\tif (zend_get_parameters_ex(1, &z_spi) == FAILURE)\n\t{\n\t\tzend_error(E_WARNING, \"Cannot parse parameters in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\tRETURN_FALSE;\n\t}\n\tif (z_spi == NULL)\n\t{\n\t\tzend_error(E_WARNING, \"Could not get SPI resource in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\tRETURN_FALSE;\n\t}\n\n\tZEND_FETCH_RESOURCE(SPIres, php_SPIresult *, z_spi, -1, \"SPI result\",\n\t\t\t\t\t\tSPIres_rtype);\n\n\tREPORT_PHP_MEMUSAGE(\"spi_status: finish\");\n\n\t/*\n\t * XXX The cast is wrong, but we use it to prevent a compiler warning.\n\t * Note that the second parameter to RETURN_STRING is \"duplicate\", so\n\t * we are returning a copy of the string anyway.\n\t */\n\tRETURN_STRING((char *) SPI_result_code_string(SPIres->status), true);\n}\n\n/*\n * spi_rewind\n * \t\tResets the internal counter for spi_fetch_row, so the next\n * \t\tspi_fetch_row call will start fetching from the beginning.\n */\nZEND_FUNCTION(spi_rewind)\n{\n\tzval\t **z_spi = NULL;\n\tphp_SPIresult\t*SPIres;\n\n\tif (ZEND_NUM_ARGS() != 1)\n\t\tWRONG_PARAM_COUNT;\n\n\tif (zend_get_parameters_ex(1, &z_spi) == FAILURE)\n\t{\n\t\tzend_error(E_WARNING, \"Cannot parse parameters in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\tRETURN_FALSE;\n\t}\n\tif (z_spi == NULL)\n\t{\n\t\tzend_error(E_WARNING, \"Could not get SPI resource in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t\tRETURN_FALSE;\n\t}\n\n\tZEND_FETCH_RESOURCE(SPIres, php_SPIresult *, z_spi, -1, \"SPI 
result\",\n\t\t\t\t\t\tSPIres_rtype);\n\n\tSPIres->current_row = 0;\n\n\tRETURN_NULL();\n}\n/*\n * pg_raise\n * User-callable function for sending messages to the Postgres log.\n */\nZEND_FUNCTION(pg_raise)\n{\n\tchar *level = NULL,\n\t\t\t *message = NULL;\n\tint level_len,\n\t\t\t\tmessage_len,\n\t\t\t\televel = 0;\n\n\tif (ZEND_NUM_ARGS() != 2)\n\t{\n\t\tereport(ERROR,\n\t\t\t\t(errcode(ERRCODE_SYNTAX_ERROR),\n\t\t\t\t errmsg(\"wrong number of arguments to %s\", \"pg_raise\")));\n\t}\n\n\tif (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, \"ss\",\n\t\t\t\t\t\t\t &level, &level_len,\n\t\t\t\t\t\t\t &message, &message_len) == FAILURE)\n\t{\n\t\tzend_error(E_WARNING, \"cannot parse parameters in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t}\n\n\tif (strcasecmp(level, \"ERROR\") == 0)\n\t\televel = E_ERROR;\n\telse if (strcasecmp(level, \"WARNING\") == 0)\n\t\televel = E_WARNING;\n\telse if (strcasecmp(level, \"NOTICE\") == 0)\n\t\televel = E_NOTICE;\n\telse\n\t\tzend_error(E_ERROR, \"incorrect log level\");\n\n\tzend_error(elevel, \"%s\", message);\n}\n\n/*\n * return_next\n * \t\tAdd a tuple to the current tuplestore\n */\nZEND_FUNCTION(return_next)\n{\n\tMemoryContext\toldcxt;\n\tzval\t *param;\n\tHeapTuple\ttup;\n\tReturnSetInfo *rsi;\n\t\n\t/*\n\t * Disallow use of return_next inside non-SRF functions\n\t */\n\tif (current_fcinfo == NULL || current_fcinfo->flinfo == NULL || \n\t\t!current_fcinfo->flinfo->fn_retset)\n\t\tereport(ERROR,\n\t\t\t\t(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),\n\t\t\t\t errmsg(\"cannot use return_next in functions not declared to \"\n\t\t\t\t\t\t\"return a set\")));\n\n\trsi = (ReturnSetInfo *) current_fcinfo->resultinfo;\n\n\tAssert(current_tupledesc != NULL);\n\tAssert(rsi != NULL);\n\t\n\tif (ZEND_NUM_ARGS() > 1)\n\t\tereport(ERROR,\n\t\t\t\t(errcode(ERRCODE_SYNTAX_ERROR),\n\t\t\t\t errmsg(\"wrong number of arguments to %s\", \"return_next\")));\n\n\tif (ZEND_NUM_ARGS() == 0)\n\t{\n\t\t/* \n\t\t * Called from the 
function declared with RETURNS TABLE \n\t */\n\t\tparam = get_table_arguments(current_attinmeta);\n\t}\n\telse if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, \"z\",\n\t\t\t\t\t\t\t ¶m) == FAILURE)\n\t{\n\t\tzend_error(E_WARNING, \"cannot parse parameters in %s\",\n\t\t\t\t get_active_function_name(TSRMLS_C));\n\t}\n\n\t/* Use the per-query context so that the tuplestore survives */\n\toldcxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory);\n\n\t/* Form the tuple */\n\ttup = plphp_srf_htup_from_zval(param, current_attinmeta, current_memcxt);\n\n\t/* First call? Create the tuplestore. */\n\tif (!current_tuplestore)\n\t\tcurrent_tuplestore = tuplestore_begin_heap(true, false, work_mem);\n\n\t/* Save the tuple and clean up */\n\ttuplestore_puttuple(current_tuplestore, tup);\n\theap_freetuple(tup);\n\n\tMemoryContextSwitchTo(oldcxt);\n}\n\n/*\n * php_SPIresult_destroy\n * \t\tFree the resources allocated by a spi_exec call.\n *\n * This is automatically called when the resource goes out of scope\n * or is overwritten by another resource.\n */\nvoid\nphp_SPIresult_destroy(zend_rsrc_list_entry *rsrc TSRMLS_DC)\n{\n\tphp_SPIresult *res = (php_SPIresult *) rsrc->ptr;\n\n\tif (res->SPI_tuptable != NULL)\n\t\tSPI_freetuptable(res->SPI_tuptable);\n\n\tfree(res);\n}\n\n/* Return an array of TABLE argument values for return_next */\nstatic\nzval *get_table_arguments(AttInMetadata *attinmeta)\n{\n\tzval *retval = NULL;\n\tint\t\ti;\n\t\n\tMAKE_STD_ZVAL(retval);\n\tarray_init(retval);\n\n\tAssert(attinmeta->tupdesc);\n\tAssert(saved_symbol_table != NULL);\n\t/* Extract OUT argument names */\n\tfor (i = 0; i < attinmeta->tupdesc->natts; i++)\n\t{\n\t\tzval \t**val;\n\t\tchar \t*attname;\n\n\t\tAssert(!attinmeta->tupdesc->attrs[i]->attisdropped);\n\n\t\tattname = NameStr(attinmeta->tupdesc->attrs[i]->attname);\n\n\t\tif (zend_hash_find(saved_symbol_table, \n\t\t\t\t\t\t attname, strlen(attname) + 1,\n\t\t\t\t\t\t (void **)&val) == 
SUCCESS)\n\n\t\t\tadd_next_index_zval(retval, *val);\n\t\telse\n\t\t\tadd_next_index_unset(retval);\n\t} \n\treturn retval;\n}\n\n\n/*\n * vim:ts=4:sw=4:cino=(0\n */\n"}}
-{"repo": "binarylogic/coercionlogic", "pr_number": 1, "title": "Fix: require the correct file", "state": "open", "merged_at": null, "additions": 1, "deletions": 1, "files_changed": ["lib/coercionlogic.rb"], "files_before": {"lib/coercionlogic.rb": "require 'activerecord'\n\nmodule Coercionlogic\n def write_attribute_with_coercion(attr_name, value)\n value = nil if value.is_a?(String) && value.blank?\n write_attribute_without_coercion(attr_name, value)\n end\nend\n\nActiveRecord::Base.class_eval do\n include Coercionlogic\n alias_method_chain :write_attribute, :coercion\nend"}, "files_after": {"lib/coercionlogic.rb": "require 'active_record'\n\nmodule Coercionlogic\n def write_attribute_with_coercion(attr_name, value)\n value = nil if value.is_a?(String) && value.blank?\n write_attribute_without_coercion(attr_name, value)\n end\nend\n\nActiveRecord::Base.class_eval do\n include Coercionlogic\n alias_method_chain :write_attribute, :coercion\nend"}}
-{"repo": "Skarabaeus/ImageColorPicker", "pr_number": 6, "title": "Unbind load event on destroy", "state": "open", "merged_at": null, "additions": 3, "deletions": 1, "files_changed": ["dist/jquery.ImageColorPicker.js", "dist/jquery.ImageColorPicker.min.js", "src/ImageColorPicker.js"], "files_before": {"dist/jquery.ImageColorPicker.js": "/*!\n* jQuery ImageColorPicker Plugin v0.2\n* http://github.com/Skarabaeus/ImageColorPicker\n*\n* Copyright 2010, Stefan Siebel\n* Licensed under the MIT license.\n* http://github.com/Skarabaeus/ImageColorPicker/MIT-LICENSE.txt\n* \n* Released under the MIT\n*\n* Date: Tue May 17 11:20:16 2011 -0700\n*/\n(function(){\nvar uiImageColorPicker = function(){\n\n\tvar _d2h = function(d) {\n\t\tvar result;\n\t\tif (! isNaN( parseInt(d) ) ) {\n\t\t\tresult = parseInt(d).toString(16);\n\t\t} else {\n\t\t\tresult = d;\n\t\t}\n\n\t\tif (result.length === 1) {\n\t\t\tresult = \"0\" + result;\n\t\t}\n\t\treturn result;\n\t};\n\n\tvar _h2d = function(h) {\n\t\treturn parseInt(h,16);\n\t};\n\n\tvar _createImageColorPicker = function(widget) {\n\t\t// store 2D context in widget for later access\n\t\twidget.ctx = null;\n\n\t\t// rgb\n\t\twidget.color = [0, 0, 0];\n\n\t\t// create additional DOM elements.\n\t\twidget.$canvas = $('');\n\n\t\t// add them to the DOM\n\t\twidget.element.wrap('');\n\t\twidget.$wrapper = widget.element.parent();\n\t\twidget.$wrapper.append(widget.$canvas);\n\n\t\tif (typeof(widget.$canvas.get(0).getContext) === 'function') { // FF, Chrome, ...\n\t\t\twidget.ctx = widget.$canvas.get(0).getContext('2d');\n\n\t\t// this does not work yet!\n\t\t} else {\n\t\t\twidget.destroy();\n\t\t\tif (console) {\n\t\t\t\tconsole.log(\"ImageColor Picker: Can't get canvas context. 
Use \"\n\t\t\t\t\t+ \"Firefox, Chrome or include excanvas to your project.\");\n\t\t\t}\n\n\t\t}\n\n\t\t// draw the image in the canvas\n\t\tvar img = new Image();\n\t\timg.src = widget.element.attr(\"src\");\n\t\twidget.$canvas.attr(\"width\", img.width);\n\t\twidget.$canvas.attr(\"height\", img.height);\n\t\twidget.ctx.drawImage(img, 0, 0);\n\n\t\t// get the image data.\n\t\ttry {\n\t\t\ttry {\n\t\t\t\twidget.imageData = widget.ctx.getImageData(0, 0, img.width, img.height);\n\t\t\t} catch (e1) {\n\t\t\t\tnetscape.security.PrivilegeManager.enablePrivilege(\"UniversalBrowserRead\");\n\t\t\t\twidget.imageData = widget.ctx.getImageData(0, 0, img.width, img.height);\n\t\t\t}\n\t\t} catch (e2) {\n\t\t\twidget.destroy();\n\t\t\tif (console) {\n\t\t\t\tconsole.log(\"ImageColor Picker: Unable to access image data. \"\n\t\t\t\t\t+ \"This could be either due \"\n\t\t\t\t\t+ \"to the browser you are using (IE doesn't work) or image and script \"\n\t\t\t\t\t+ \"are saved on different servers or you run the script locally. 
\");\n\t\t\t}\n\t\t}\n\n\t\t// hide the original image\n\t\twidget.element.hide();\n\n\t\t// for usage in events\n\t\tvar that = widget;\n\n\t\twidget.$canvas.bind(\"mousemove\", function(e){\n var point = imageCoordinates( that, e.pageX, e.pageY );\n var color = lookupColor( that.imageData, point );\n\n updateCurrentColor( that, color.red, color.green, color.blue );\n\t\t});\n\n\t\twidget.$canvas.bind(\"click\", function(e){\n var point = imageCoordinates( that, e.pageX, e.pageY );\n var color = lookupColor( that.imageData, point );\n\n updateSelectedColor( that, color.red, color.green, color.blue );\n\t\t\tthat._trigger(\"afterColorSelected\", 0, that.selectedColor());\n\t\t});\n\n\t\twidget.$canvas.bind(\"mouseleave\", function(e){\n\t\t\tupdateCurrentColor(that, 255, 255, 255);\n\t\t});\n\n\t\t// hope that helps to prevent memory leaks\n\t\t$(window).unload(function(e){\n\t\t\tthat.destroy();\n\t\t});\n\t};\n\n // for pageX and pageY, determine image coordinates using offset\n var imageCoordinates = function( widget, pageX, pageY ) {\n var offset = widget.$canvas.offset();\n\n return { x: Math.round( pageX - offset.left ),\n y: Math.round( pageY - offset.top ) };\n }\n\n // lookup color values for point [x,y] location in image\n var lookupColor = function( imageData, point) {\n var pixel = ((point.y * imageData.width) + point.x) * 4;\n\n return { red: imageData.data[pixel],\n green: imageData.data[(pixel + 1)],\n blue: imageData.data[(pixel + 2)] }\n\n }\n\n\tvar updateCurrentColor = function(widget, red, green, blue) {\n\t\tvar c = widget.ctx;\n\t\tvar canvasWidth = widget.$canvas.attr(\"width\");\n\t\tvar canvasHeight = widget.$canvas.attr(\"height\");\n\n\t\t// draw current Color\n\t\tc.fillStyle = \"rgb(\" + red + \",\" + green + \",\" + blue + \")\";\n\t\tc.fillRect (canvasWidth - 62, canvasHeight - 32, 30, 30);\n\n\t\t// draw border\n\t\tc.lineWidth = \"3\"\n\t\tc.lineJoin = \"round\";\n\t\tc.strokeRect (canvasWidth - 62, canvasHeight - 32, 30, 
30);\n\t}\n\n\tvar updateSelectedColor = function(widget, red, green, blue) {\n\t\tvar c = widget.ctx;\n\t\tvar canvasWidth = widget.$canvas.attr(\"width\");\n\t\tvar canvasHeight = widget.$canvas.attr(\"height\");\n\n\t\t// draw current Color\n\t\tc.fillStyle = \"rgb(\" + red + \",\" + green + \",\" + blue + \")\";\n\t\tc.fillRect (canvasWidth - 32, canvasHeight - 32, 30, 30);\n\n\t\t// draw border\n\t\tc.lineWidth = \"3\"\n\t\tc.lineJoin = \"round\";\n\t\tc.strokeRect (canvasWidth - 32, canvasHeight - 32, 30, 30);\n\n\t\t// set new selected color\n\t\tvar newColor = [red, green, blue];\n\t\twidget.color = newColor;\n\t}\n\n\treturn {\n\t\t// default options\n\t\toptions: {\n\n\t\t},\n\n\t\t_create: function() {\n\t\t\tif (this.element.get(0).tagName.toLowerCase() === 'img') {\n\t\t\t\tif (this.element.get(0).complete) {\n\t\t\t\t\t_createImageColorPicker(this);\n\t\t\t\t} else {\n\t\t\t\t\tthis.element.bind('load', { that: this }, function(e){\n\t\t\t\t\t\tvar that = e.data.that;\n\t\t\t\t\t\t_createImageColorPicker(that);\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\tdestroy: function() {\n\t\t\t// default destroy\n\t\t\t$.Widget.prototype.destroy.apply(this, arguments);\n\n\t\t\t// remove possible large array with pixel data\n\t\t\tthis.imageData = null;\n\n\t\t\t// remove additional elements\n\t\t\tthis.$canvas.remove();\n\t\t\tthis.element.unwrap();\n\t\t\tthis.element.show();\n\t\t},\n\n\t\tselectedColor: function() {\n\t\t\treturn \"#\" + _d2h(this.color[0]) + _d2h(this.color[1]) + _d2h(this.color[2]);\n\t\t}\n\n\t};\n}();\n\t$.widget(\"ui.ImageColorPicker\", uiImageColorPicker);\n})();\n", "dist/jquery.ImageColorPicker.min.js": "/*!\n* jQuery ImageColorPicker Plugin v0.2\n* http://github.com/Skarabaeus/ImageColorPicker\n*\n* Copyright 2010, Stefan Siebel\n* Licensed under the MIT license.\n* http://github.com/Skarabaeus/ImageColorPicker/MIT-LICENSE.txt\n* \n* Released under the MIT\n*\n* Date: Tue May 17 11:20:16 2011 -0700\n*/\n(function(){var 
n=function(){var g=function(a){a=isNaN(parseInt(a))?a:parseInt(a).toString(16);if(a.length===1)a=\"0\"+a;return a},k=function(a){a.ctx=null;a.color=[0,0,0];a.$canvas=$('');a.element.wrap('');a.$wrapper=a.element.parent();a.$wrapper.append(a.$canvas);if(typeof a.$canvas.get(0).getContext===\"function\")a.ctx=a.$canvas.get(0).getContext(\"2d\");else{a.destroy();console&&console.log(\"ImageColor Picker: Can't get canvas context. Use Firefox, Chrome or include excanvas to your project.\")}var c=\nnew Image;c.src=a.element.attr(\"src\");a.$canvas.attr(\"width\",c.width);a.$canvas.attr(\"height\",c.height);a.ctx.drawImage(c,0,0);try{try{a.imageData=a.ctx.getImageData(0,0,c.width,c.height)}catch(d){netscape.security.PrivilegeManager.enablePrivilege(\"UniversalBrowserRead\");a.imageData=a.ctx.getImageData(0,0,c.width,c.height)}}catch(e){a.destroy();console&&console.log(\"ImageColor Picker: Unable to access image data. This could be either due to the browser you are using (IE doesn't work) or image and script are saved on different servers or you run the script locally. 
\")}a.element.hide();\na.$canvas.bind(\"mousemove\",function(b){b=h(a,b.pageX,b.pageY);b=i(a.imageData,b);j(a,b.red,b.green,b.blue)});a.$canvas.bind(\"click\",function(b){b=h(a,b.pageX,b.pageY);b=i(a.imageData,b);m(a,b.red,b.green,b.blue);a._trigger(\"afterColorSelected\",0,a.selectedColor())});a.$canvas.bind(\"mouseleave\",function(){j(a,255,255,255)});$(window).unload(function(){a.destroy()})},h=function(a,c,d){a=a.$canvas.offset();return{x:Math.round(c-a.left),y:Math.round(d-a.top)}},i=function(a,c){c=(c.y*a.width+c.x)*4;return{red:a.data[c],\ngreen:a.data[c+1],blue:a.data[c+2]}},j=function(a,c,d,e){var b=a.ctx,f=a.$canvas.attr(\"width\");a=a.$canvas.attr(\"height\");b.fillStyle=\"rgb(\"+c+\",\"+d+\",\"+e+\")\";b.fillRect(f-62,a-32,30,30);b.lineWidth=\"3\";b.lineJoin=\"round\";b.strokeRect(f-62,a-32,30,30)},m=function(a,c,d,e){var b=a.ctx,f=a.$canvas.attr(\"width\"),l=a.$canvas.attr(\"height\");b.fillStyle=\"rgb(\"+c+\",\"+d+\",\"+e+\")\";b.fillRect(f-32,l-32,30,30);b.lineWidth=\"3\";b.lineJoin=\"round\";b.strokeRect(f-32,l-32,30,30);a.color=[c,d,e]};return{options:{},\n_create:function(){if(this.element.get(0).tagName.toLowerCase()===\"img\")this.element.get(0).complete?k(this):this.element.bind(\"load\",{that:this},function(a){k(a.data.that)})},destroy:function(){$.Widget.prototype.destroy.apply(this,arguments);this.imageData=null;this.$canvas.remove();this.element.unwrap();this.element.show()},selectedColor:function(){return\"#\"+g(this.color[0])+g(this.color[1])+g(this.color[2])}}}();$.widget(\"ui.ImageColorPicker\",n)})();\n", "src/ImageColorPicker.js": "var uiImageColorPicker = function(){\n\n\tvar _d2h = function(d) {\n\t\tvar result;\n\t\tif (! 
isNaN( parseInt(d) ) ) {\n\t\t\tresult = parseInt(d).toString(16);\n\t\t} else {\n\t\t\tresult = d;\n\t\t}\n\n\t\tif (result.length === 1) {\n\t\t\tresult = \"0\" + result;\n\t\t}\n\t\treturn result;\n\t};\n\n\tvar _h2d = function(h) {\n\t\treturn parseInt(h,16);\n\t};\n\n\tvar _createImageColorPicker = function(widget) {\n\t\t// store 2D context in widget for later access\n\t\twidget.ctx = null;\n\n\t\t// rgb\n\t\twidget.color = [0, 0, 0];\n\n\t\t// create additional DOM elements.\n\t\twidget.$canvas = $('');\n\n\t\t// add them to the DOM\n\t\twidget.element.wrap('');\n\t\twidget.$wrapper = widget.element.parent();\n\t\twidget.$wrapper.append(widget.$canvas);\n\n\t\tif (typeof(widget.$canvas.get(0).getContext) === 'function') { // FF, Chrome, ...\n\t\t\twidget.ctx = widget.$canvas.get(0).getContext('2d');\n\n\t\t// this does not work yet!\n\t\t} else {\n\t\t\twidget.destroy();\n\t\t\tif (console) {\n\t\t\t\tconsole.log(\"ImageColor Picker: Can't get canvas context. Use \"\n\t\t\t\t\t+ \"Firefox, Chrome or include excanvas to your project.\");\n\t\t\t}\n\n\t\t}\n\n\t\t// draw the image in the canvas\n\t\tvar img = new Image();\n\t\timg.src = widget.element.attr(\"src\");\n\t\twidget.$canvas.attr(\"width\", img.width);\n\t\twidget.$canvas.attr(\"height\", img.height);\n\t\twidget.ctx.drawImage(img, 0, 0);\n\n\t\t// get the image data.\n\t\ttry {\n\t\t\ttry {\n\t\t\t\twidget.imageData = widget.ctx.getImageData(0, 0, img.width, img.height);\n\t\t\t} catch (e1) {\n\t\t\t\tnetscape.security.PrivilegeManager.enablePrivilege(\"UniversalBrowserRead\");\n\t\t\t\twidget.imageData = widget.ctx.getImageData(0, 0, img.width, img.height);\n\t\t\t}\n\t\t} catch (e2) {\n\t\t\twidget.destroy();\n\t\t\tif (console) {\n\t\t\t\tconsole.log(\"ImageColor Picker: Unable to access image data. 
\"\n\t\t\t\t\t+ \"This could be either due \"\n\t\t\t\t\t+ \"to the browser you are using (IE doesn't work) or image and script \"\n\t\t\t\t\t+ \"are saved on different servers or you run the script locally. \");\n\t\t\t}\n\t\t}\n\n\t\t// hide the original image\n\t\twidget.element.hide();\n\n\t\t// for usage in events\n\t\tvar that = widget;\n\n\t\twidget.$canvas.bind(\"mousemove\", function(e){\n var point = imageCoordinates( that, e.pageX, e.pageY );\n var color = lookupColor( that.imageData, point );\n\n updateCurrentColor( that, color.red, color.green, color.blue );\n\t\t});\n\n\t\twidget.$canvas.bind(\"click\", function(e){\n var point = imageCoordinates( that, e.pageX, e.pageY );\n var color = lookupColor( that.imageData, point );\n\n updateSelectedColor( that, color.red, color.green, color.blue );\n\t\t\tthat._trigger(\"afterColorSelected\", 0, that.selectedColor());\n\t\t});\n\n\t\twidget.$canvas.bind(\"mouseleave\", function(e){\n\t\t\tupdateCurrentColor(that, 255, 255, 255);\n\t\t});\n\n\t\t// hope that helps to prevent memory leaks\n\t\t$(window).unload(function(e){\n\t\t\tthat.destroy();\n\t\t});\n\t};\n\n // for pageX and pageY, determine image coordinates using offset\n var imageCoordinates = function( widget, pageX, pageY ) {\n var offset = widget.$canvas.offset();\n\n return { x: Math.round( pageX - offset.left ),\n y: Math.round( pageY - offset.top ) };\n }\n\n // lookup color values for point [x,y] location in image\n var lookupColor = function( imageData, point) {\n var pixel = ((point.y * imageData.width) + point.x) * 4;\n\n return { red: imageData.data[pixel],\n green: imageData.data[(pixel + 1)],\n blue: imageData.data[(pixel + 2)] }\n\n }\n\n\tvar updateCurrentColor = function(widget, red, green, blue) {\n\t\tvar c = widget.ctx;\n\t\tvar canvasWidth = widget.$canvas.attr(\"width\");\n\t\tvar canvasHeight = widget.$canvas.attr(\"height\");\n\n\t\t// draw current Color\n\t\tc.fillStyle = \"rgb(\" + red + \",\" + green + \",\" + blue + 
\")\";\n\t\tc.fillRect (canvasWidth - 62, canvasHeight - 32, 30, 30);\n\n\t\t// draw border\n\t\tc.lineWidth = \"3\"\n\t\tc.lineJoin = \"round\";\n\t\tc.strokeRect (canvasWidth - 62, canvasHeight - 32, 30, 30);\n\t}\n\n\tvar updateSelectedColor = function(widget, red, green, blue) {\n\t\tvar c = widget.ctx;\n\t\tvar canvasWidth = widget.$canvas.attr(\"width\");\n\t\tvar canvasHeight = widget.$canvas.attr(\"height\");\n\n\t\t// draw current Color\n\t\tc.fillStyle = \"rgb(\" + red + \",\" + green + \",\" + blue + \")\";\n\t\tc.fillRect (canvasWidth - 32, canvasHeight - 32, 30, 30);\n\n\t\t// draw border\n\t\tc.lineWidth = \"3\"\n\t\tc.lineJoin = \"round\";\n\t\tc.strokeRect (canvasWidth - 32, canvasHeight - 32, 30, 30);\n\n\t\t// set new selected color\n\t\tvar newColor = [red, green, blue];\n\t\twidget.color = newColor;\n\t}\n\n\treturn {\n\t\t// default options\n\t\toptions: {\n\n\t\t},\n\n\t\t_create: function() {\n\t\t\tif (this.element.get(0).tagName.toLowerCase() === 'img') {\n\t\t\t\tif (this.element.get(0).complete) {\n\t\t\t\t\t_createImageColorPicker(this);\n\t\t\t\t} else {\n\t\t\t\t\tthis.element.bind('load', { that: this }, function(e){\n\t\t\t\t\t\tvar that = e.data.that;\n\t\t\t\t\t\t_createImageColorPicker(that);\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\tdestroy: function() {\n\t\t\t// default destroy\n\t\t\t$.Widget.prototype.destroy.apply(this, arguments);\n\n\t\t\t// remove possible large array with pixel data\n\t\t\tthis.imageData = null;\n\n\t\t\t// remove additional elements\n\t\t\tthis.$canvas.remove();\n\t\t\tthis.element.unwrap();\n\t\t\tthis.element.show();\n\t\t},\n\n\t\tselectedColor: function() {\n\t\t\treturn \"#\" + _d2h(this.color[0]) + _d2h(this.color[1]) + _d2h(this.color[2]);\n\t\t}\n\n\t};\n}();\n"}, "files_after": {"dist/jquery.ImageColorPicker.js": "/*!\n* jQuery ImageColorPicker Plugin v0.2\n* http://github.com/Skarabaeus/ImageColorPicker\n*\n* Copyright 2010, Stefan Siebel\n* Licensed under the MIT license.\n* 
http://github.com/Skarabaeus/ImageColorPicker/MIT-LICENSE.txt\n* \n* Released under the MIT\n*\n* Date: Tue May 17 11:20:16 2011 -0700\n*/\n(function(){\nvar uiImageColorPicker = function(){\n\n\tvar _d2h = function(d) {\n\t\tvar result;\n\t\tif (! isNaN( parseInt(d) ) ) {\n\t\t\tresult = parseInt(d).toString(16);\n\t\t} else {\n\t\t\tresult = d;\n\t\t}\n\n\t\tif (result.length === 1) {\n\t\t\tresult = \"0\" + result;\n\t\t}\n\t\treturn result;\n\t};\n\n\tvar _h2d = function(h) {\n\t\treturn parseInt(h,16);\n\t};\n\n\tvar _createImageColorPicker = function(widget) {\n\t\t// store 2D context in widget for later access\n\t\twidget.ctx = null;\n\n\t\t// rgb\n\t\twidget.color = [0, 0, 0];\n\n\t\t// create additional DOM elements.\n\t\twidget.$canvas = $('');\n\n\t\t// add them to the DOM\n\t\twidget.element.wrap('');\n\t\twidget.$wrapper = widget.element.parent();\n\t\twidget.$wrapper.append(widget.$canvas);\n\n\t\tif (typeof(widget.$canvas.get(0).getContext) === 'function') { // FF, Chrome, ...\n\t\t\twidget.ctx = widget.$canvas.get(0).getContext('2d');\n\n\t\t// this does not work yet!\n\t\t} else {\n\t\t\twidget.destroy();\n\t\t\tif (console) {\n\t\t\t\tconsole.log(\"ImageColor Picker: Can't get canvas context. 
Use \"\n\t\t\t\t\t+ \"Firefox, Chrome or include excanvas to your project.\");\n\t\t\t}\n\n\t\t}\n\n\t\t// draw the image in the canvas\n\t\tvar img = new Image();\n\t\timg.src = widget.element.attr(\"src\");\n\t\twidget.$canvas.attr(\"width\", img.width);\n\t\twidget.$canvas.attr(\"height\", img.height);\n\t\twidget.ctx.drawImage(img, 0, 0);\n\n\t\t// get the image data.\n\t\ttry {\n\t\t\ttry {\n\t\t\t\twidget.imageData = widget.ctx.getImageData(0, 0, img.width, img.height);\n\t\t\t} catch (e1) {\n\t\t\t\tnetscape.security.PrivilegeManager.enablePrivilege(\"UniversalBrowserRead\");\n\t\t\t\twidget.imageData = widget.ctx.getImageData(0, 0, img.width, img.height);\n\t\t\t}\n\t\t} catch (e2) {\n\t\t\twidget.destroy();\n\t\t\tif (console) {\n\t\t\t\tconsole.log(\"ImageColor Picker: Unable to access image data. \"\n\t\t\t\t\t+ \"This could be either due \"\n\t\t\t\t\t+ \"to the browser you are using (IE doesn't work) or image and script \"\n\t\t\t\t\t+ \"are saved on different servers or you run the script locally. 
\");\n\t\t\t}\n\t\t}\n\n\t\t// hide the original image\n\t\twidget.element.hide();\n\n\t\t// for usage in events\n\t\tvar that = widget;\n\n\t\twidget.$canvas.bind(\"mousemove\", function(e){\n var point = imageCoordinates( that, e.pageX, e.pageY );\n var color = lookupColor( that.imageData, point );\n\n updateCurrentColor( that, color.red, color.green, color.blue );\n\t\t});\n\n\t\twidget.$canvas.bind(\"click\", function(e){\n var point = imageCoordinates( that, e.pageX, e.pageY );\n var color = lookupColor( that.imageData, point );\n\n updateSelectedColor( that, color.red, color.green, color.blue );\n\t\t\tthat._trigger(\"afterColorSelected\", 0, that.selectedColor());\n\t\t});\n\n\t\twidget.$canvas.bind(\"mouseleave\", function(e){\n\t\t\tupdateCurrentColor(that, 255, 255, 255);\n\t\t});\n\n\t\t// hope that helps to prevent memory leaks\n\t\t$(window).unload(function(e){\n\t\t\tthat.destroy();\n\t\t});\n\t};\n\n // for pageX and pageY, determine image coordinates using offset\n var imageCoordinates = function( widget, pageX, pageY ) {\n var offset = widget.$canvas.offset();\n\n return { x: Math.round( pageX - offset.left ),\n y: Math.round( pageY - offset.top ) };\n }\n\n // lookup color values for point [x,y] location in image\n var lookupColor = function( imageData, point) {\n var pixel = ((point.y * imageData.width) + point.x) * 4;\n\n return { red: imageData.data[pixel],\n green: imageData.data[(pixel + 1)],\n blue: imageData.data[(pixel + 2)] }\n\n }\n\n\tvar updateCurrentColor = function(widget, red, green, blue) {\n\t\tvar c = widget.ctx;\n\t\tvar canvasWidth = widget.$canvas.attr(\"width\");\n\t\tvar canvasHeight = widget.$canvas.attr(\"height\");\n\n\t\t// draw current Color\n\t\tc.fillStyle = \"rgb(\" + red + \",\" + green + \",\" + blue + \")\";\n\t\tc.fillRect (canvasWidth - 62, canvasHeight - 32, 30, 30);\n\n\t\t// draw border\n\t\tc.lineWidth = \"3\"\n\t\tc.lineJoin = \"round\";\n\t\tc.strokeRect (canvasWidth - 62, canvasHeight - 32, 30, 
30);\n\t}\n\n\tvar updateSelectedColor = function(widget, red, green, blue) {\n\t\tvar c = widget.ctx;\n\t\tvar canvasWidth = widget.$canvas.attr(\"width\");\n\t\tvar canvasHeight = widget.$canvas.attr(\"height\");\n\n\t\t// draw current Color\n\t\tc.fillStyle = \"rgb(\" + red + \",\" + green + \",\" + blue + \")\";\n\t\tc.fillRect (canvasWidth - 32, canvasHeight - 32, 30, 30);\n\n\t\t// draw border\n\t\tc.lineWidth = \"3\"\n\t\tc.lineJoin = \"round\";\n\t\tc.strokeRect (canvasWidth - 32, canvasHeight - 32, 30, 30);\n\n\t\t// set new selected color\n\t\tvar newColor = [red, green, blue];\n\t\twidget.color = newColor;\n\t}\n\n\treturn {\n\t\t// default options\n\t\toptions: {\n\n\t\t},\n\n\t\t_create: function() {\n\t\t\tif (this.element.get(0).tagName.toLowerCase() === 'img') {\n\t\t\t\tif (this.element.get(0).complete) {\n\t\t\t\t\t_createImageColorPicker(this);\n\t\t\t\t} else {\n\t\t\t\t\tthis.element.bind('load', { that: this }, function(e){\n\t\t\t\t\t\tvar that = e.data.that;\n\t\t\t\t\t\t_createImageColorPicker(that);\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\tdestroy: function() {\n\t\t\t// default destroy\n\t\t\t$.Widget.prototype.destroy.apply(this, arguments);\n\n\t\t\t// remove possible large array with pixel data\n\t\t\tthis.imageData = null;\n\n\t\t\t// remove additional elements\n\t\t\tthis.$canvas.remove();\n\t\t\tthis.element.unbind('load');\n\t\t\tthis.element.unwrap();\n\t\t\tthis.element.show();\n\t\t},\n\n\t\tselectedColor: function() {\n\t\t\treturn \"#\" + _d2h(this.color[0]) + _d2h(this.color[1]) + _d2h(this.color[2]);\n\t\t}\n\n\t};\n}();\n\t$.widget(\"ui.ImageColorPicker\", uiImageColorPicker);\n})();\n", "dist/jquery.ImageColorPicker.min.js": "/*!\n* jQuery ImageColorPicker Plugin v0.2\n* http://github.com/Skarabaeus/ImageColorPicker\n*\n* Copyright 2010, Stefan Siebel\n* Licensed under the MIT license.\n* http://github.com/Skarabaeus/ImageColorPicker/MIT-LICENSE.txt\n* \n* Released under the MIT\n*\n* Date: Tue May 17 11:20:16 
2011 -0700\n*/\n(function(){var n=function(){var g=function(a){a=isNaN(parseInt(a))?a:parseInt(a).toString(16);if(a.length===1)a=\"0\"+a;return a},k=function(a){a.ctx=null;a.color=[0,0,0];a.$canvas=$('');a.element.wrap('');a.$wrapper=a.element.parent();a.$wrapper.append(a.$canvas);if(typeof a.$canvas.get(0).getContext===\"function\")a.ctx=a.$canvas.get(0).getContext(\"2d\");else{a.destroy();console&&console.log(\"ImageColor Picker: Can't get canvas context. Use Firefox, Chrome or include excanvas to your project.\")}var c=\nnew Image;c.src=a.element.attr(\"src\");a.$canvas.attr(\"width\",c.width);a.$canvas.attr(\"height\",c.height);a.ctx.drawImage(c,0,0);try{try{a.imageData=a.ctx.getImageData(0,0,c.width,c.height)}catch(d){netscape.security.PrivilegeManager.enablePrivilege(\"UniversalBrowserRead\");a.imageData=a.ctx.getImageData(0,0,c.width,c.height)}}catch(e){a.destroy();console&&console.log(\"ImageColor Picker: Unable to access image data. This could be either due to the browser you are using (IE doesn't work) or image and script are saved on different servers or you run the script locally. 
\")}a.element.hide();\na.$canvas.bind(\"mousemove\",function(b){b=h(a,b.pageX,b.pageY);b=i(a.imageData,b);j(a,b.red,b.green,b.blue)});a.$canvas.bind(\"click\",function(b){b=h(a,b.pageX,b.pageY);b=i(a.imageData,b);m(a,b.red,b.green,b.blue);a._trigger(\"afterColorSelected\",0,a.selectedColor())});a.$canvas.bind(\"mouseleave\",function(){j(a,255,255,255)});$(window).unload(function(){a.destroy()})},h=function(a,c,d){a=a.$canvas.offset();return{x:Math.round(c-a.left),y:Math.round(d-a.top)}},i=function(a,c){c=(c.y*a.width+c.x)*4;return{red:a.data[c],\ngreen:a.data[c+1],blue:a.data[c+2]}},j=function(a,c,d,e){var b=a.ctx,f=a.$canvas.attr(\"width\");a=a.$canvas.attr(\"height\");b.fillStyle=\"rgb(\"+c+\",\"+d+\",\"+e+\")\";b.fillRect(f-62,a-32,30,30);b.lineWidth=\"3\";b.lineJoin=\"round\";b.strokeRect(f-62,a-32,30,30)},m=function(a,c,d,e){var b=a.ctx,f=a.$canvas.attr(\"width\"),l=a.$canvas.attr(\"height\");b.fillStyle=\"rgb(\"+c+\",\"+d+\",\"+e+\")\";b.fillRect(f-32,l-32,30,30);b.lineWidth=\"3\";b.lineJoin=\"round\";b.strokeRect(f-32,l-32,30,30);a.color=[c,d,e]};return{options:{},\n_create:function(){if(this.element.get(0).tagName.toLowerCase()===\"img\")this.element.get(0).complete?k(this):this.element.bind(\"load\",{that:this},function(a){k(a.data.that)})},destroy:function(){$.Widget.prototype.destroy.apply(this,arguments);this.imageData=null;this.$canvas.remove();this.element.unbind('load');this.element.unwrap();this.element.show()},selectedColor:function(){return\"#\"+g(this.color[0])+g(this.color[1])+g(this.color[2])}}}();$.widget(\"ui.ImageColorPicker\",n)})();\n", "src/ImageColorPicker.js": "var uiImageColorPicker = function(){\n\n\tvar _d2h = function(d) {\n\t\tvar result;\n\t\tif (! 
isNaN( parseInt(d) ) ) {\n\t\t\tresult = parseInt(d).toString(16);\n\t\t} else {\n\t\t\tresult = d;\n\t\t}\n\n\t\tif (result.length === 1) {\n\t\t\tresult = \"0\" + result;\n\t\t}\n\t\treturn result;\n\t};\n\n\tvar _h2d = function(h) {\n\t\treturn parseInt(h,16);\n\t};\n\n\tvar _createImageColorPicker = function(widget) {\n\t\t// store 2D context in widget for later access\n\t\twidget.ctx = null;\n\n\t\t// rgb\n\t\twidget.color = [0, 0, 0];\n\n\t\t// create additional DOM elements.\n\t\twidget.$canvas = $('');\n\n\t\t// add them to the DOM\n\t\twidget.element.wrap('');\n\t\twidget.$wrapper = widget.element.parent();\n\t\twidget.$wrapper.append(widget.$canvas);\n\n\t\tif (typeof(widget.$canvas.get(0).getContext) === 'function') { // FF, Chrome, ...\n\t\t\twidget.ctx = widget.$canvas.get(0).getContext('2d');\n\n\t\t// this does not work yet!\n\t\t} else {\n\t\t\twidget.destroy();\n\t\t\tif (console) {\n\t\t\t\tconsole.log(\"ImageColor Picker: Can't get canvas context. Use \"\n\t\t\t\t\t+ \"Firefox, Chrome or include excanvas to your project.\");\n\t\t\t}\n\n\t\t}\n\n\t\t// draw the image in the canvas\n\t\tvar img = new Image();\n\t\timg.src = widget.element.attr(\"src\");\n\t\twidget.$canvas.attr(\"width\", img.width);\n\t\twidget.$canvas.attr(\"height\", img.height);\n\t\twidget.ctx.drawImage(img, 0, 0);\n\n\t\t// get the image data.\n\t\ttry {\n\t\t\ttry {\n\t\t\t\twidget.imageData = widget.ctx.getImageData(0, 0, img.width, img.height);\n\t\t\t} catch (e1) {\n\t\t\t\tnetscape.security.PrivilegeManager.enablePrivilege(\"UniversalBrowserRead\");\n\t\t\t\twidget.imageData = widget.ctx.getImageData(0, 0, img.width, img.height);\n\t\t\t}\n\t\t} catch (e2) {\n\t\t\twidget.destroy();\n\t\t\tif (console) {\n\t\t\t\tconsole.log(\"ImageColor Picker: Unable to access image data. 
\"\n\t\t\t\t\t+ \"This could be either due \"\n\t\t\t\t\t+ \"to the browser you are using (IE doesn't work) or image and script \"\n\t\t\t\t\t+ \"are saved on different servers or you run the script locally. \");\n\t\t\t}\n\t\t}\n\n\t\t// hide the original image\n\t\twidget.element.hide();\n\n\t\t// for usage in events\n\t\tvar that = widget;\n\n\t\twidget.$canvas.bind(\"mousemove\", function(e){\n var point = imageCoordinates( that, e.pageX, e.pageY );\n var color = lookupColor( that.imageData, point );\n\n updateCurrentColor( that, color.red, color.green, color.blue );\n\t\t});\n\n\t\twidget.$canvas.bind(\"click\", function(e){\n var point = imageCoordinates( that, e.pageX, e.pageY );\n var color = lookupColor( that.imageData, point );\n\n updateSelectedColor( that, color.red, color.green, color.blue );\n\t\t\tthat._trigger(\"afterColorSelected\", 0, that.selectedColor());\n\t\t});\n\n\t\twidget.$canvas.bind(\"mouseleave\", function(e){\n\t\t\tupdateCurrentColor(that, 255, 255, 255);\n\t\t});\n\n\t\t// hope that helps to prevent memory leaks\n\t\t$(window).unload(function(e){\n\t\t\tthat.destroy();\n\t\t});\n\t};\n\n // for pageX and pageY, determine image coordinates using offset\n var imageCoordinates = function( widget, pageX, pageY ) {\n var offset = widget.$canvas.offset();\n\n return { x: Math.round( pageX - offset.left ),\n y: Math.round( pageY - offset.top ) };\n }\n\n // lookup color values for point [x,y] location in image\n var lookupColor = function( imageData, point) {\n var pixel = ((point.y * imageData.width) + point.x) * 4;\n\n return { red: imageData.data[pixel],\n green: imageData.data[(pixel + 1)],\n blue: imageData.data[(pixel + 2)] }\n\n }\n\n\tvar updateCurrentColor = function(widget, red, green, blue) {\n\t\tvar c = widget.ctx;\n\t\tvar canvasWidth = widget.$canvas.attr(\"width\");\n\t\tvar canvasHeight = widget.$canvas.attr(\"height\");\n\n\t\t// draw current Color\n\t\tc.fillStyle = \"rgb(\" + red + \",\" + green + \",\" + blue + 
\")\";\n\t\tc.fillRect (canvasWidth - 62, canvasHeight - 32, 30, 30);\n\n\t\t// draw border\n\t\tc.lineWidth = \"3\"\n\t\tc.lineJoin = \"round\";\n\t\tc.strokeRect (canvasWidth - 62, canvasHeight - 32, 30, 30);\n\t}\n\n\tvar updateSelectedColor = function(widget, red, green, blue) {\n\t\tvar c = widget.ctx;\n\t\tvar canvasWidth = widget.$canvas.attr(\"width\");\n\t\tvar canvasHeight = widget.$canvas.attr(\"height\");\n\n\t\t// draw current Color\n\t\tc.fillStyle = \"rgb(\" + red + \",\" + green + \",\" + blue + \")\";\n\t\tc.fillRect (canvasWidth - 32, canvasHeight - 32, 30, 30);\n\n\t\t// draw border\n\t\tc.lineWidth = \"3\"\n\t\tc.lineJoin = \"round\";\n\t\tc.strokeRect (canvasWidth - 32, canvasHeight - 32, 30, 30);\n\n\t\t// set new selected color\n\t\tvar newColor = [red, green, blue];\n\t\twidget.color = newColor;\n\t}\n\n\treturn {\n\t\t// default options\n\t\toptions: {\n\n\t\t},\n\n\t\t_create: function() {\n\t\t\tif (this.element.get(0).tagName.toLowerCase() === 'img') {\n\t\t\t\tif (this.element.get(0).complete) {\n\t\t\t\t\t_createImageColorPicker(this);\n\t\t\t\t} else {\n\t\t\t\t\tthis.element.bind('load', { that: this }, function(e){\n\t\t\t\t\t\tvar that = e.data.that;\n\t\t\t\t\t\t_createImageColorPicker(that);\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\n\t\tdestroy: function() {\n\t\t\t// default destroy\n\t\t\t$.Widget.prototype.destroy.apply(this, arguments);\n\n\t\t\t// remove possible large array with pixel data\n\t\t\tthis.imageData = null;\n\n\t\t\t// remove additional elements\n\t\t\tthis.$canvas.remove();\n\t\t\tthis.element.unbind('load');\n\t\t\tthis.element.unwrap();\n\t\t\tthis.element.show();\n\t\t},\n\n\t\tselectedColor: function() {\n\t\t\treturn \"#\" + _d2h(this.color[0]) + _d2h(this.color[1]) + _d2h(this.color[2]);\n\t\t}\n\n\t};\n}();\n"}}
-{"repo": "Fantomas42/easy-extract", "pr_number": 2, "title": "Handle spaces by quoting the command string instead of escaping", "state": "closed", "merged_at": null, "additions": 10, "deletions": 10, "files_changed": ["easy_extract/__init__.py", "easy_extract/archive.py", "easy_extract/archives/hj_split.py", "easy_extract/archives/media.py", "easy_extract/archives/rar.py", "easy_extract/archives/seven_zip.py", "easy_extract/archives/xtm.py"], "files_before": {"easy_extract/__init__.py": "\"\"\"easy_extract module\"\"\"\n__version__ = '0.1.2'\n__license__ = 'GPL'\n\n__author__ = 'Fantomas42'\n__email__ = 'fantomas42@gmail.com'\n\n__url__ = 'https://github.com/Fantomas42/easy-extract'\n", "easy_extract/archive.py": "\"\"\"Archive collection modules\"\"\"\nimport os\n\nCHAR_TO_ESCAPE = (' ', '(', ')', '*', \"'\", '\"', '&')\n\n\nclass BaseFileCollection(object):\n \"\"\"Base file collection\"\"\"\n\n def __init__(self, name, path='.', filenames=[]):\n self.name = name\n self.path = path\n self.filenames = filenames\n\n @property\n def files(self):\n return self.filenames\n\n def escape_filename(self, filename):\n \"\"\"Escape a filename\"\"\"\n for char in CHAR_TO_ESCAPE:\n filename = filename.replace(char, '\\%s' % char)\n return filename\n\n def get_path_filename(self, filename):\n \"\"\"Concatenate path and filename\"\"\"\n return os.path.join(self.path, filename)\n\n def get_command_filename(self, filename):\n \"\"\"Convert filename for command line\"\"\"\n return self.escape_filename(self.get_path_filename(filename))\n\n def remove(self):\n \"\"\"Remove all files collection\"\"\"\n return os.system('rm -f %s' % ' '.join(\n [self.get_command_filename(f) for f in self.files]))\n\n\nclass MedKit(BaseFileCollection):\n \"\"\"MedKit is collection of par2 files\"\"\"\n\n def __init__(self, name, path='.', filenames=[]):\n super(MedKit, self).__init__(name, path, filenames)\n self.medkits = []\n self.find_medkits(self.filenames)\n\n @property\n def files(self):\n 
return self.medkits\n\n def is_medkit_file(self, filename):\n \"\"\"Check if the filename is a medkit\"\"\"\n return bool(filename.startswith(self.name) and\n filename.lower().endswith('.par2'))\n\n def find_medkits(self, filenames=[]):\n \"\"\"Find files for building the medkit\"\"\"\n for filename in filenames:\n if self.is_medkit_file(filename) and filename not in self.medkits:\n self.medkits.append(filename)\n self.medkits.sort()\n\n def check_and_repair(self, silent=False):\n \"\"\"Check and repair with medkits\"\"\"\n if self.medkits:\n options = silent and '-qq' or ''\n root_medkit = self.get_command_filename(self.medkits[0])\n extra_kits = '%s*' % self.get_command_filename(self.name)\n command = 'par2 r %s %s %s' % (options, root_medkit, extra_kits)\n result = os.system(command)\n return bool(not result)\n return False\n\n\nclass Archive(MedKit):\n \"\"\"Archive is a collection of archive files and a MedKit\"\"\"\n archive_type = 'undefined'\n ALLOWED_EXTENSIONS = []\n\n def __init__(self, name, path='.', filenames=[]):\n super(Archive, self).__init__(name, path, filenames)\n self.archives = []\n self.find_archives(self.filenames)\n\n @property\n def files(self):\n return self.archives + self.medkits\n\n @classmethod\n def is_archive_file(cls, filename):\n \"\"\"Check if the filename is allowed for the Archive\"\"\"\n for regext in cls.ALLOWED_EXTENSIONS:\n if regext.search(filename):\n return regext.split(filename)[0]\n return False\n\n def find_archives(self, filenames=[]):\n \"\"\"Find files for building the archive\"\"\"\n for filename in filenames:\n if (filename.startswith(self.name) and\n self.is_archive_file(filename) and\n filename not in self.archives):\n self.archives.append(filename)\n self.archives.sort()\n\n def extract(self, repair=True):\n \"\"\"Extract the archive and do integrity checking\"\"\"\n extraction = self._extract()\n\n if not extraction and repair:\n if self.check_and_repair():\n extraction = self._extract()\n\n return 
extraction\n\n def _extract(self):\n \"\"\"Extract the archive\"\"\"\n raise NotImplementedError\n\n def __str__(self):\n if self.medkits:\n return '%s (%i %s archives, %i par2 files)' % (\n self.name, len(self.archives),\n self.archive_type, len(self.medkits))\n return '%s (%i %s archives)' % (\n self.name, len(self.archives), self.archive_type)\n", "easy_extract/archives/hj_split.py": "\"\"\"HJ Split archive format\"\"\"\nimport os\nimport re\n\nfrom easy_extract.archive import Archive\n\nEXTENSIONS = [re.compile('\\.\\d{3}$', re.I)]\n\n\nclass HJSplitArchive(Archive):\n \"\"\"The HJ Split format\"\"\"\n archive_type = 'hj-split'\n ALLOWED_EXTENSIONS = EXTENSIONS\n\n def _extract(self):\n new_filename = self.escape_filename(self.name)\n first_archive = self.get_command_filename(self.archives[0])\n\n print 'Extracting %s...' % new_filename\n\n os.system('cat %s > %s' % (first_archive, new_filename))\n\n for archive in self.archives[1:]:\n archive = self.get_command_filename(archive)\n os.system('cat %s >> %s' % (archive, new_filename))\n\n return True\n\n def remove(self):\n pass\n", "easy_extract/archives/media.py": "\"\"\"Media archive file format\"\"\"\nimport os\nimport re\n\nfrom easy_extract.archive import Archive\n\nRAW_EXTENSIONS = ['\\.AVI', '\\.OGG', '\\.OGV',\n '\\.MP4', '\\.MPG', '\\.MPEG',\n '\\.MKV', '\\.M4V']\n\nEXTENSIONS = [re.compile('%s$' % ext, re.I) for ext in RAW_EXTENSIONS]\n\n\nclass MediaArchive(Archive):\n \"\"\"The Rar format Archive\"\"\"\n archive_type = 'media'\n ALLOWED_EXTENSIONS = EXTENSIONS\n\n def _extract(self):\n first_archive = self.get_command_filename(self.archives[0])\n return not os.system('synoindex -a %s' % first_archive)\n\n def remove(self):\n pass\n", "easy_extract/archives/rar.py": "\"\"\"Rar archive format\"\"\"\nimport os\nimport re\n\nfrom easy_extract.archive import Archive\n\nEXTENSIONS = [re.compile('\\.r\\d{2}$', re.I),\n re.compile('\\.part\\d+\\.rar$', re.I),\n re.compile('\\.rar$', re.I)]\n\n\nclass 
RarArchive(Archive):\n \"\"\"The Rar format Archive\"\"\"\n archive_type = 'rar'\n ALLOWED_EXTENSIONS = EXTENSIONS\n\n def _extract(self):\n if '%s.rar' % self.name in self.archives:\n first_archive = self.get_command_filename('%s.rar' % self.name)\n else:\n first_archive = self.get_command_filename(self.archives[0])\n\n return not os.system('unrar e -o+ %s' % first_archive)\n", "easy_extract/archives/seven_zip.py": "\"\"\"7zip archive format\"\"\"\nimport os\nimport re\n\nfrom easy_extract.archive import Archive\n\nRAW_EXTENSIONS = ['\\.ARJ', '\\.CAB', '\\.CHM', '\\.CPIO',\n '\\.DMG', '\\.HFS', '\\.LZH', '\\.LZMA',\n '\\.NSIS', '\\.UDF', '\\.WIM', '\\.XAR',\n '\\.Z', '\\.ZIP', '\\.GZIP', '\\.TAR']\n\nEXTENSIONS = [re.compile('%s$' % ext, re.I) for ext in RAW_EXTENSIONS]\n\n\nclass SevenZipArchive(Archive):\n \"\"\"The 7z unarchiver is used for many formats\"\"\"\n archive_type = '7zip'\n ALLOWED_EXTENSIONS = EXTENSIONS\n\n def _extract(self):\n first_archive = self.get_command_filename(self.archives[0])\n return not os.system('7z e -y %s' % first_archive)\n", "easy_extract/archives/xtm.py": "\"\"\"Xtm archive format\"\"\"\nimport os\nimport re\n\nfrom easy_extract.archive import Archive\n\nEXTENSIONS = [re.compile('\\.\\d{3}\\.xtm$', re.I),\n re.compile('\\.xtm$', re.I)]\n\n\nclass XtmArchive(Archive):\n \"\"\"The XTM archive format\"\"\"\n archive_type = 'xtm'\n ALLOWED_EXTENSIONS = EXTENSIONS\n\n def _extract(self):\n new_filename = self.escape_filename(self.name)\n first_archive = self.get_command_filename(self.archives[0])\n\n print 'Extracting %s...' 
% new_filename\n\n os.system('dd if=%s skip=1 ibs=104 status=noxfer > %s 2>/dev/null' %\n (first_archive, new_filename))\n\n for archive in self.archives[1:]:\n archive = self.get_command_filename(archive)\n os.system('cat %s >> %s' % (archive, new_filename))\n\n return True\n\n def remove(self):\n pass\n"}, "files_after": {"easy_extract/__init__.py": "\"\"\"easy_extract module\"\"\"\n__version__ = '0.1.3'\n__license__ = 'GPL'\n\n__author__ = 'Fantomas42'\n__email__ = 'fantomas42@gmail.com'\n\n__url__ = 'https://github.com/Fantomas42/easy-extract'\n", "easy_extract/archive.py": "\"\"\"Archive collection modules\"\"\"\nimport os\n\nCHAR_TO_ESCAPE = ('(', ')', '*', \"'\", '\"', '&')\n\n\nclass BaseFileCollection(object):\n \"\"\"Base file collection\"\"\"\n\n def __init__(self, name, path='.', filenames=[]):\n self.name = name\n self.path = path\n self.filenames = filenames\n\n @property\n def files(self):\n return self.filenames\n\n def escape_filename(self, filename):\n \"\"\"Escape a filename\"\"\"\n for char in CHAR_TO_ESCAPE:\n filename = filename.replace(char, '\\%s' % char)\n return filename\n\n def get_path_filename(self, filename):\n \"\"\"Concatenate path and filename\"\"\"\n return os.path.join(self.path, filename)\n\n def get_command_filename(self, filename):\n \"\"\"Convert filename for command line\"\"\"\n return self.escape_filename(self.get_path_filename(filename))\n\n def remove(self):\n \"\"\"Remove all files collection\"\"\"\n return os.system('rm -f %s' % ' '.join(\n [self.get_command_filename(f) for f in self.files]))\n\n\nclass MedKit(BaseFileCollection):\n \"\"\"MedKit is collection of par2 files\"\"\"\n\n def __init__(self, name, path='.', filenames=[]):\n super(MedKit, self).__init__(name, path, filenames)\n self.medkits = []\n self.find_medkits(self.filenames)\n\n @property\n def files(self):\n return self.medkits\n\n def is_medkit_file(self, filename):\n \"\"\"Check if the filename is a medkit\"\"\"\n return 
bool(filename.startswith(self.name) and\n filename.lower().endswith('.par2'))\n\n def find_medkits(self, filenames=[]):\n \"\"\"Find files for building the medkit\"\"\"\n for filename in filenames:\n if self.is_medkit_file(filename) and filename not in self.medkits:\n self.medkits.append(filename)\n self.medkits.sort()\n\n def check_and_repair(self, silent=False):\n \"\"\"Check and repair with medkits\"\"\"\n if self.medkits:\n options = silent and '-qq' or ''\n root_medkit = self.get_command_filename(self.medkits[0])\n extra_kits = '%s*' % self.get_command_filename(self.name)\n command = 'par2 r %s %s %s' % (options, root_medkit, extra_kits)\n result = os.system(command)\n return bool(not result)\n return False\n\n\nclass Archive(MedKit):\n \"\"\"Archive is a collection of archive files and a MedKit\"\"\"\n archive_type = 'undefined'\n ALLOWED_EXTENSIONS = []\n\n def __init__(self, name, path='.', filenames=[]):\n super(Archive, self).__init__(name, path, filenames)\n self.archives = []\n self.find_archives(self.filenames)\n\n @property\n def files(self):\n return self.archives + self.medkits\n\n @classmethod\n def is_archive_file(cls, filename):\n \"\"\"Check if the filename is allowed for the Archive\"\"\"\n for regext in cls.ALLOWED_EXTENSIONS:\n if regext.search(filename):\n return regext.split(filename)[0]\n return False\n\n def find_archives(self, filenames=[]):\n \"\"\"Find files for building the archive\"\"\"\n for filename in filenames:\n if (filename.startswith(self.name) and\n self.is_archive_file(filename) and\n filename not in self.archives):\n self.archives.append(filename)\n self.archives.sort()\n\n def extract(self, repair=True):\n \"\"\"Extract the archive and do integrity checking\"\"\"\n extraction = self._extract()\n\n if not extraction and repair:\n if self.check_and_repair():\n extraction = self._extract()\n\n return extraction\n\n def _extract(self):\n \"\"\"Extract the archive\"\"\"\n raise NotImplementedError\n\n def __str__(self):\n if 
self.medkits:\n return '%s (%i %s archives, %i par2 files)' % (\n self.name, len(self.archives),\n self.archive_type, len(self.medkits))\n return '%s (%i %s archives)' % (\n self.name, len(self.archives), self.archive_type)\n", "easy_extract/archives/hj_split.py": "\"\"\"HJ Split archive format\"\"\"\nimport os\nimport re\n\nfrom easy_extract.archive import Archive\n\nEXTENSIONS = [re.compile('\\.\\d{3}$', re.I)]\n\n\nclass HJSplitArchive(Archive):\n \"\"\"The HJ Split format\"\"\"\n archive_type = 'hj-split'\n ALLOWED_EXTENSIONS = EXTENSIONS\n\n def _extract(self):\n new_filename = self.escape_filename(self.name)\n first_archive = self.get_command_filename(self.archives[0])\n\n print 'Extracting %s...' % new_filename\n\n os.system('cat \"%s\" > \"%s\"' % (first_archive, new_filename))\n\n for archive in self.archives[1:]:\n archive = self.get_command_filename(archive)\n os.system('cat \"%s\" >> \"%s\"' % (archive, new_filename))\n\n return True\n\n def remove(self):\n pass\n", "easy_extract/archives/media.py": "\"\"\"Media archive file format\"\"\"\nimport os\nimport re\n\nfrom easy_extract.archive import Archive\n\nRAW_EXTENSIONS = ['\\.AVI', '\\.OGG', '\\.OGV',\n '\\.MP4', '\\.MPG', '\\.MPEG',\n '\\.MKV', '\\.M4V']\n\nEXTENSIONS = [re.compile('%s$' % ext, re.I) for ext in RAW_EXTENSIONS]\n\n\nclass MediaArchive(Archive):\n \"\"\"The Rar format Archive\"\"\"\n archive_type = 'media'\n ALLOWED_EXTENSIONS = EXTENSIONS\n\n def _extract(self):\n first_archive = self.get_command_filename(self.archives[0])\n return not os.system('synoindex -a \"%s\"' % first_archive)\n\n def remove(self):\n pass\n", "easy_extract/archives/rar.py": "\"\"\"Rar archive format\"\"\"\nimport os\nimport re\n\nfrom easy_extract.archive import Archive\n\nEXTENSIONS = [re.compile('\\.r\\d{2}$', re.I),\n re.compile('\\.part\\d+\\.rar$', re.I),\n re.compile('\\.rar$', re.I)]\n\n\nclass RarArchive(Archive):\n \"\"\"The Rar format Archive\"\"\"\n archive_type = 'rar'\n ALLOWED_EXTENSIONS = 
EXTENSIONS\n\n def _extract(self):\n if '%s.rar' % self.name in self.archives:\n first_archive = self.get_command_filename('%s.rar' % self.name)\n else:\n first_archive = self.get_command_filename(self.archives[0])\n\n return not os.system('unrar e -o+ \"%s\"' % first_archive)\n", "easy_extract/archives/seven_zip.py": "\"\"\"7zip archive format\"\"\"\nimport os\nimport re\n\nfrom easy_extract.archive import Archive\n\nRAW_EXTENSIONS = ['\\.ARJ', '\\.CAB', '\\.CHM', '\\.CPIO',\n '\\.DMG', '\\.HFS', '\\.LZH', '\\.LZMA',\n '\\.NSIS', '\\.UDF', '\\.WIM', '\\.XAR',\n '\\.Z', '\\.ZIP', '\\.GZIP', '\\.TAR']\n\nEXTENSIONS = [re.compile('%s$' % ext, re.I) for ext in RAW_EXTENSIONS]\n\n\nclass SevenZipArchive(Archive):\n \"\"\"The 7z unarchiver is used for many formats\"\"\"\n archive_type = '7zip'\n ALLOWED_EXTENSIONS = EXTENSIONS\n\n def _extract(self):\n first_archive = self.get_command_filename(self.archives[0])\n return not os.system('7z e -y \"%s\"' % first_archive)\n", "easy_extract/archives/xtm.py": "\"\"\"Xtm archive format\"\"\"\nimport os\nimport re\n\nfrom easy_extract.archive import Archive\n\nEXTENSIONS = [re.compile('\\.\\d{3}\\.xtm$', re.I),\n re.compile('\\.xtm$', re.I)]\n\n\nclass XtmArchive(Archive):\n \"\"\"The XTM archive format\"\"\"\n archive_type = 'xtm'\n ALLOWED_EXTENSIONS = EXTENSIONS\n\n def _extract(self):\n new_filename = self.escape_filename(self.name)\n first_archive = self.get_command_filename(self.archives[0])\n\n print 'Extracting %s...' % new_filename\n\n os.system('dd if=\"%s\" skip=1 ibs=104 status=noxfer > \"%s\" 2>/dev/null'\n % (first_archive, new_filename))\n\n for archive in self.archives[1:]:\n archive = self.get_command_filename(archive)\n os.system('cat \"%s\" >> \"%s\"' % (archive, new_filename))\n\n return True\n\n def remove(self):\n pass\n"}}
-{"repo": "benlaurie/lucre", "pr_number": 3, "title": "make include folder and various bug-fixes", "state": "closed", "merged_at": null, "additions": 14, "deletions": 1, "files_changed": ["src/bankimp.cpp"], "files_before": {"src/bankimp.cpp": "#include \"bank.h\"\n#include \n\nstatic BIO *dout;\nstatic BIO *mout;\n\nconst char _NL[]=\"\\n\";\n\nvoid SetDumper(BIO *out)\n {\n dout=out;\n if(!mout)\n\tmout=out;\n }\n\nvoid SetDumper(FILE *f)\n {\n BIO *out=BIO_new(BIO_s_file());\n assert(out);\n BIO_set_fp(out,f,BIO_NOCLOSE);\n SetDumper(out);\n }\n\nvoid SetMonitor(BIO *out)\n { mout=out; }\n\nvoid SetMonitor(FILE *f)\n {\n BIO *out=BIO_new(BIO_s_file());\n assert(out);\n BIO_set_fp(out,f,BIO_NOCLOSE);\n SetMonitor(out);\n }\n\nBIGNUM *ReadNumber(BIO *in,const char *szTitle)\n {\n char szLine[10240];\n unsigned char aucBN[1024];\n int nTLen=strlen(szTitle);\n\n BIO_gets(in,szLine,sizeof szLine-1);\n if(strncmp(szLine,szTitle,nTLen))\n\t{\n\tfprintf(stderr,\"Got %s, expected %s\\n\",szLine,szTitle);\n\tassert(!\"Unexpected input\");\n\treturn NULL;\n\t}\n BIGNUM *bn=BN_new();\n\n int n=strcspn(szLine+nTLen,\"\\r\\n\");\n szLine[nTLen+n]='\\0';\n if(n&1)\n\t{\n\tmemmove(szLine+nTLen+1,szLine+nTLen,n+1);\n\tszLine[nTLen]='0';\n\t}\n\n for(n=0 ; szLine[nTLen+n*2] ; ++n)\n\t{\n\tint h;\n\n\tsscanf(&szLine[nTLen+n*2],\"%02x\",&h);\n\taucBN[n]=(unsigned char)h;\n\t}\n\t\n BN_bin2bn(aucBN,n,bn);\n\n return bn;\n }\n\nvoid DumpNumber(BIO *out,const char *szTitle,const BIGNUM *bn,\n\t\tconst char *szTrailer)\n {\n if(!out)\n\treturn;\n BIO_puts(out,szTitle);\n if(!bn)\n\tBIO_puts(out,\"(null)\");\n else\n\tBN_print(out,bn);\n BIO_puts(out,szTrailer);\n }\n\nvoid DumpNumber(const char *szTitle,const BIGNUM *bn,const char *szTrailer)\n { DumpNumber(dout,szTitle,bn,szTrailer); }\n\nvoid HexDump(BIO *out,const char *szTitle,const unsigned char *acBuf,\n\t int nLength)\n {\n if(!out)\n\treturn;\n BIO_puts(out,szTitle);\n for(int n=0 ; n < nLength ; ++n)\n\t{\n\tchar 
buf[3];\n\tsprintf(buf,\"%02X\",acBuf[n]);\n\tBIO_puts(out,buf);\n\t}\n BIO_puts(out,\"\\n\");\n }\n\nvoid HexDump(const char *szTitle,const unsigned char *acBuf,int nLength)\n { HexDump(dout,szTitle,acBuf,nLength); }\n\nPublicBank::PublicBank(Bank &bank)\n {\n m_pDH=DH_new();\n m_pDH->g=BN_dup(bank.g());\n m_pDH->p=BN_dup(bank.p());\n m_pDH->pub_key=BN_dup(bank.pub_key());\n }\n\nvoid Bank::cb(int n, int, void */*arg*/)\n {\n if(!mout)\n\treturn;\n\n char c='*';\n\n if (n == 0) c='.';\n if (n == 1) c='+';\n if (n == 2) c='*';\n if (n == 3) c='\\n';\n BIO_write(mout,&c,1);\n BIO_flush(mout);\n }\n\n/*const*/ BIGNUM *Bank::SignRequest(PublicCoinRequest &req)\n {\n InitCTX();\n\n BIGNUM *BtoA=BN_new();\n BN_mod_exp(BtoA,req.Request(),priv_key(),p(),m_ctx);\n DumpNumber(\"B->A= \",BtoA);\n\n return BtoA;\n }\n\nboolean Bank::Verify(Coin &coin)\n {\n InitCTX();\n\n BIGNUM *t=BN_new();\n if(!coin.GenerateCoinNumber(t,*this))\n\treturn false;\n BN_mod_exp(t,t,priv_key(),p(),m_ctx);\n DumpNumber(\"y^k= \",t);\n\n BN_sub(t,t,coin.Signature());\n boolean bRet=BN_is_zero(t);\n\n BN_free(t);\n\n return bRet;\n }\n\nvoid Bank::WriteBIO(BIO *bio)\n {\n PublicBank::WriteBIO(bio);\n DumpNumber(bio,\"private=\",priv_key());\n }\n\nvoid Bank::ReadBIO(BIO *bio)\n {\n PublicBank::ReadBIO(bio);\n m_pDH->priv_key=ReadNumber(bio,\"private=\");\n }\n\nvoid PublicBank::WriteBIO(BIO *bio)\n {\n DumpNumber(bio,\"g=\",g());\n DumpNumber(bio,\"p=\",p());\n DumpNumber(bio,\"public=\",pub_key());\n }\n\nvoid PublicBank::ReadBIO(BIO *bio)\n {\n m_pDH=DH_new();\n\n m_pDH->g=ReadNumber(bio,\"g=\");\n m_pDH->p=ReadNumber(bio,\"p=\");\n m_pDH->pub_key=ReadNumber(bio,\"public=\");\n\n Dump();\n }\n\nvoid UnsignedCoin::WriteBIO(BIO *bio)\n {\n DumpNumber(bio,\"id=\",m_bnCoinID);\n }\n\nvoid UnsignedCoin::ReadBIO(BIO *bio)\n {\n m_bnCoinID=ReadNumber(bio,\"id=\");\n }\n\nvoid Coin::WriteBIO(BIO *bio)\n {\n UnsignedCoin::WriteBIO(bio);\n DumpNumber(bio,\"signature=\",m_bnCoinSignature);\n }\n\nvoid 
Coin::ReadBIO(BIO *bio)\n {\n UnsignedCoin::ReadBIO(bio);\n m_bnCoinSignature=ReadNumber(bio,\"signature=\");\n Dump();\n }\n\nvoid PublicCoinRequest::WriteBIO(BIO *bio)\n {\n DumpNumber(bio,\"request=\",m_bnCoinRequest);\n }\n\nvoid PublicCoinRequest::ReadBIO(BIO *bio)\n {\n m_bnCoinRequest=ReadNumber(bio,\"request=\");\n }\n\nvoid CoinRequest::WriteBIO(BIO *bio)\n {\n PublicCoinRequest::WriteBIO(bio);\n m_coin.WriteBIO(bio);\n DumpNumber(bio,\"blinding=\",m_bnBlindingFactor);\n }\n\nvoid CoinRequest::ReadBIO(BIO *bio)\n {\n PublicCoinRequest::ReadBIO(bio);\n m_coin.ReadBIO(bio);\n m_bnBlindingFactor=ReadNumber(bio,\"blinding=\");\n\n Dump();\n }\n\n"}, "files_after": {"src/bankimp.cpp": "\n#ifdef __APPLE__\n#pragma GCC diagnostic ignored \"-Wdeprecated-declarations\"\n#endif\n\n#include \"bank.h\"\n#include \n\n#ifdef _WIN32\n#include \n#endif\n\nstatic BIO *dout;\nstatic BIO *mout;\n\nconst char _NL[]=\"\\n\";\n\nvoid SetDumper(BIO *out)\n {\n dout=out;\n if(!mout)\n\tmout=out;\n }\n\nvoid SetDumper(FILE *f)\n {\n BIO *out=BIO_new(BIO_s_file());\n assert(out);\n BIO_set_fp(out,f,BIO_NOCLOSE);\n SetDumper(out);\n }\n\nvoid SetMonitor(BIO *out)\n { mout=out; }\n\nvoid SetMonitor(FILE *f)\n {\n BIO *out=BIO_new(BIO_s_file());\n assert(out);\n BIO_set_fp(out,f,BIO_NOCLOSE);\n SetMonitor(out);\n }\n\nBIGNUM *ReadNumber(BIO *in,const char *szTitle)\n {\n char szLine[10240];\n unsigned char aucBN[1024];\n int nTLen=strlen(szTitle);\n\n BIO_gets(in,szLine,sizeof szLine-1);\n if(strncmp(szLine,szTitle,nTLen))\n\t{\n\tfprintf(stderr,\"Got %s, expected %s\\n\",szLine,szTitle);\n\tassert(!\"Unexpected input\");\n\treturn NULL;\n\t}\n BIGNUM *bn=BN_new();\n\n int n=strcspn(szLine+nTLen,\"\\r\\n\");\n szLine[nTLen+n]='\\0';\n if(n&1)\n\t{\n\tmemmove(szLine+nTLen+1,szLine+nTLen,n+1);\n\tszLine[nTLen]='0';\n\t}\n\n for(n=0 ; szLine[nTLen+n*2] ; ++n)\n\t{\n\tint h;\n\n\tsscanf(&szLine[nTLen+n*2],\"%02x\",&h);\n\taucBN[n]=(unsigned char)h;\n\t}\n\t\n BN_bin2bn(aucBN,n,bn);\n\n 
return bn;\n }\n\nvoid DumpNumber(BIO *out,const char *szTitle,const BIGNUM *bn,\n\t\tconst char *szTrailer)\n {\n if(!out)\n\treturn;\n BIO_puts(out,szTitle);\n if(!bn)\n\tBIO_puts(out,\"(null)\");\n else\n\tBN_print(out,bn);\n BIO_puts(out,szTrailer);\n }\n\nvoid DumpNumber(const char *szTitle,const BIGNUM *bn,const char *szTrailer)\n { DumpNumber(dout,szTitle,bn,szTrailer); }\n\nvoid HexDump(BIO *out,const char *szTitle,const unsigned char *acBuf,\n\t int nLength)\n {\n if(!out)\n\treturn;\n BIO_puts(out,szTitle);\n for(int n=0 ; n < nLength ; ++n)\n\t{\n\tchar buf[3];\n\tsprintf(buf,\"%02X\",acBuf[n]);\n\tBIO_puts(out,buf);\n\t}\n BIO_puts(out,\"\\n\");\n }\n\nvoid HexDump(const char *szTitle,const unsigned char *acBuf,int nLength)\n { HexDump(dout,szTitle,acBuf,nLength); }\n\nPublicBank::PublicBank(Bank &bank)\n {\n m_pDH=DH_new();\n m_pDH->g=BN_dup(bank.g());\n m_pDH->p=BN_dup(bank.p());\n m_pDH->pub_key=BN_dup(bank.pub_key());\n }\n\nvoid Bank::cb(int n, int, void * /*arg*/)\n {\n if(!mout)\n\treturn;\n\n char c='*';\n\n if (n == 0) c='.';\n if (n == 1) c='+';\n if (n == 2) c='*';\n if (n == 3) c='\\n';\n BIO_write(mout,&c,1);\n BIO_flush(mout);\n }\n\n/*const*/ BIGNUM *Bank::SignRequest(PublicCoinRequest &req)\n {\n InitCTX();\n\n BIGNUM *BtoA=BN_new();\n BN_mod_exp(BtoA,req.Request(),priv_key(),p(),m_ctx);\n DumpNumber(\"B->A= \",BtoA);\n\n return BtoA;\n }\n\nboolean Bank::Verify(Coin &coin)\n {\n InitCTX();\n\n BIGNUM *t=BN_new();\n if(!coin.GenerateCoinNumber(t,*this))\n\treturn false;\n BN_mod_exp(t,t,priv_key(),p(),m_ctx);\n DumpNumber(\"y^k= \",t);\n\n BN_sub(t,t,coin.Signature());\n boolean bRet=BN_is_zero(t);\n\n BN_free(t);\n\n return bRet;\n }\n\nvoid Bank::WriteBIO(BIO *bio)\n {\n PublicBank::WriteBIO(bio);\n DumpNumber(bio,\"private=\",priv_key());\n }\n\nvoid Bank::ReadBIO(BIO *bio)\n {\n PublicBank::ReadBIO(bio);\n m_pDH->priv_key=ReadNumber(bio,\"private=\");\n }\n\nvoid PublicBank::WriteBIO(BIO *bio)\n {\n DumpNumber(bio,\"g=\",g());\n 
DumpNumber(bio,\"p=\",p());\n DumpNumber(bio,\"public=\",pub_key());\n }\n\nvoid PublicBank::ReadBIO(BIO *bio)\n {\n m_pDH=DH_new();\n\n m_pDH->g=ReadNumber(bio,\"g=\");\n m_pDH->p=ReadNumber(bio,\"p=\");\n m_pDH->pub_key=ReadNumber(bio,\"public=\");\n\n Dump();\n }\n\nvoid UnsignedCoin::WriteBIO(BIO *bio)\n {\n DumpNumber(bio,\"id=\",m_bnCoinID);\n }\n\nvoid UnsignedCoin::ReadBIO(BIO *bio)\n {\n m_bnCoinID=ReadNumber(bio,\"id=\");\n }\n\nvoid Coin::WriteBIO(BIO *bio)\n {\n UnsignedCoin::WriteBIO(bio);\n DumpNumber(bio,\"signature=\",m_bnCoinSignature);\n }\n\nvoid Coin::ReadBIO(BIO *bio)\n {\n UnsignedCoin::ReadBIO(bio);\n m_bnCoinSignature=ReadNumber(bio,\"signature=\");\n Dump();\n }\n\nvoid PublicCoinRequest::WriteBIO(BIO *bio)\n {\n DumpNumber(bio,\"request=\",m_bnCoinRequest);\n }\n\nvoid PublicCoinRequest::ReadBIO(BIO *bio)\n {\n m_bnCoinRequest=ReadNumber(bio,\"request=\");\n }\n\nvoid CoinRequest::WriteBIO(BIO *bio)\n {\n PublicCoinRequest::WriteBIO(bio);\n m_coin.WriteBIO(bio);\n DumpNumber(bio,\"blinding=\",m_bnBlindingFactor);\n }\n\nvoid CoinRequest::ReadBIO(BIO *bio)\n {\n PublicCoinRequest::ReadBIO(bio);\n m_coin.ReadBIO(bio);\n m_bnBlindingFactor=ReadNumber(bio,\"blinding=\");\n\n Dump();\n }\n\n"}}
-{"repo": "xxx/acts_as_archive", "pr_number": 3, "title": "adding fix to return the correct datetimestamp for rails generator", "state": "closed", "merged_at": "2014-09-01T22:45:52Z", "additions": 5, "deletions": 4, "files_changed": ["lib/acts_as_archive/migration.rb"], "files_before": {"lib/acts_as_archive/migration.rb": "module ActsAsArchive\n module Migration\n \n def self.included(base)\n unless base.included_modules.include?(InstanceMethods)\n base.send :extend, ClassMethods\n base.class_eval do\n class < 'file', :timeout => 120, :location => Dir.tmpdir\n\n base_uri 'http://api.themoviedb.org/2.1'\n format :json\n \n def initialize(key, lang = 'en')\n @api_key = key\n @default_lang = lang\n end\n \n def search(query, lang = @default_lang)\n data = self.class.get(method_url('Movie.search', lang, query))\n \n result_or_empty_array(data, Movie)\n end\n\n # Read more about the parameters that can be passed to this method here:\n # http://api.themoviedb.org/2.1/methods/Movie.browse\n def browse(params = {}, lang = @default_lang)\n data = self.class.get(method_url('Movie.browse', lang), :query => {:order => \"asc\", :order_by => \"title\"}.merge(params))\n \n result_or_empty_array(data, Movie)\n end\n \n def search_person(query, lang = @default_lang)\n data = self.class.get(method_url('Person.search', lang, query))\n \n result_or_empty_array(data, Person)\n end\n \n def imdb_lookup(imdb_id, lang = @default_lang)\n data = self.class.get(method_url('Movie.imdbLookup', lang, imdb_id)).parsed_response\n if data.class != Array || data.first == \"Nothing found.\"\n nil\n else\n Movie.new(data.first, self)\n end\n end\n \n def get_info(id, lang = @default_lang)\n data = self.class.get(method_url('Movie.getInfo', lang, id)).parsed_response\n Movie.new(data.first, self)\n end\n\n def get_file_info(file, lang=@default_lang)\n hash = TMDBParty::MovieHasher.compute_hash(file)\n bytesize = file.size\n data = self.class.get(method_url('Media.getInfo', lang, hash, bytesize))\n\n 
result_or_empty_array(data, Movie)\n end\n\n def get_person(id, lang = @default_lang)\n data = self.class.get(method_url('Person.getInfo', lang, id)).parsed_response\n Person.new(data.first, self)\n end\n \n def get_genres(lang = @default_lang)\n data = self.class.get(method_url('Genres.getList', lang)).parsed_response\n data[1..-1].collect { |genre| Genre.new(genre) } # Skips the first, see spec/fixtures/genres_results.json\n end\n \n private\n\n def result_or_empty_array(data, klass)\n data = data.parsed_response\n if data.class != Array || data.first == \"Nothing found.\"\n []\n else\n data.collect { |object| klass.new(object, self) }\n end\n end\n\n def method_url(method, lang, *args)\n url = [method, lang, self.class.format, @api_key]\n url += args.collect{ |a| URI.escape(a.to_s) }\n '/' + url.join('/')\n end\n end\nend\n", "lib/tmdb_party/cast_member.rb": "module TMDBParty\n class CastMember\n include Attributes\n attr_reader :tmdb\n attributes :name, :url, :job, :department\n attributes :id, :type => Integer\n \n def initialize(values, tmdb)\n @tmdb = tmdb\n self.attributes = values\n end\n \n def character_name\n read_attribute('character')\n end\n \n def image_url\n read_attribute('profile')\n end\n \n def person\n tmdb.get_person(id)\n end\n \n def self.parse(data, tmdb)\n return unless data\n if data.is_a?(Array)\n data.collect do |person|\n CastMember.new(person, tmdb)\n end\n else\n [CastMember.new(data, tmdb)]\n end\n end\n end\nend", "lib/tmdb_party/category.rb": "module TMDBParty\n class Category\n include Attributes\n attributes :name, :url\n \n def initialize(values)\n self.attributes = values\n end\n \n def self.parse(data)\n return unless data\n data = data[\"category\"]\n if data.is_a?(Array)\n data.collect do |category|\n Category.new(category)\n end\n else\n [Category.new(data)]\n end\n end\n end\nend\n", "lib/tmdb_party/genre.rb": "module TMDBParty\n class Genre\n include Attributes\n attributes :id, :type => Integer\n attributes :name, 
:url\n \n def initialize(values)\n self.attributes = values\n end\n \n def self.parse(data)\n return unless data\n if data.is_a?(Array)\n data.collect do |g|\n Genre.new(g)\n end\n else\n [Genre.new(data)]\n end\n end\n end\nend\n", "lib/tmdb_party/movie.rb": "module TMDBParty\n class Movie\n include Attributes\n attr_reader :tmdb\n \n attributes :name, :overview, :id, :imdb_id, :movie_type, :url, :alternative_title, :translated, :certification\n attributes :released\n attributes :id, :popularity, :type => Integer\n attributes :score, :type => Float\n \n attributes :tagline, :lazy => :get_info!\n attributes :posters, :backdrops, :lazy => :get_info!, :type => Image\n attributes :homepage, :lazy => :get_info!\n attributes :trailer, :lazy => :get_info!\n attributes :runtime, :lazy => :get_info!, :type => Integer\n attributes :genres, :lazy => :get_info!, :type => Genre\n attributes :countries, :lazy => :get_info!, :type => Country\n attributes :studios, :lazy => :get_info!, :type => Studio\n \n alias_method :translated?, :translated\n \n def initialize(values, tmdb)\n @tmdb = tmdb\n self.attributes = values\n end\n \n def get_info!\n movie = tmdb.get_info(self.id)\n @attributes.merge!(movie.attributes) if movie\n @loaded = true\n end\n\n def cast\n # TODO: This needs refactoring\n CastMember.parse(read_or_load_attribute('cast', nil, :get_info!), tmdb)\n end\n\n def language\n read_attribute('language').downcase.to_sym\n end\n\n def last_modified_at\n # Date from TMDB is always in MST, but no timezone is present in date string\n Time.parse(read_attribute('last_modified_at') + ' MST')\n end\n\n def directors\n find_cast('Directing')\n end\n\n def actors\n find_cast('Actors')\n end\n\n def writers\n find_cast('Writing')\n end\n\n def producers\n find_cast('Production')\n end\n \n private\n \n def find_cast(type)\n return [] unless cast\n guys = cast.select{|c| c.department == type}\n end\n\n end\n \nend", "lib/tmdb_party/person.rb": "module TMDBParty\n class Person\n 
include Attributes\n attr_reader :tmdb\n attributes :id, :popularity, :type => Integer\n attributes :score, :type => Float\n attributes :name, :url, :biography\n \n attributes :birthplace, :birthday, :lazy => :get_info!\n \n def initialize(values, tmdb)\n @tmdb = tmdb\n self.attributes = values\n end\n \n def biography\n # HTTParty does not parse the encoded hexadecimal properly. It does not consider 000F to be a hex, but 000f is\n # A bug has been submitted about this\n read_attribute('biography').gsub(\"\\\\n\", \"\\n\").gsub(/\\\\u([0-9A-F]{4})/) { [$1.hex].pack(\"U\") }\n end\n \n \n def get_info!\n person = tmdb.get_person(self.id)\n @attributes.merge!(person.attributes) if person\n @loaded = true\n end\n end\nend", "lib/tmdb_party/studio.rb": "module TMDBParty\n class Studio\n include Attributes\n attributes :name, :url\n \n def self.parse(data)\n return unless data\n if data.is_a?(Array)\n data.map { |row| Studio.new(row) }\n else\n [Studio.new(data)]\n end\n end\n \n def initialize(attributes)\n self.attributes = attributes\n end\n end\nend", "lib/tmdb_party/video.rb": "module TMDBParty\n class Video\n attr_reader :url\n \n def initialize(url)\n @url = url\n end\n \n def self.parse(data)\n return unless data\n if data.is_a?(Array)\n data.collect do |url|\n Video.new(url)\n end\n else\n Video.new(data)\n end\n end\n end\nend"}, "files_after": {"lib/tmdb_party.rb": "require 'httparty'\n\n%w[extras/httparty_icebox extras/attributes entity video genre person image country studio cast_member movie extras/movie_hasher].each do |class_name|\n require \"tmdb_party/#{class_name}\"\nend\n\nmodule TMDBParty\n class Base\n include HTTParty\n include HTTParty::Icebox\n cache :store => 'file', :timeout => 120, :location => Dir.tmpdir\n\n base_uri 'http://api.themoviedb.org/2.1'\n format :json\n \n def initialize(key, lang = 'en')\n @api_key = key\n @default_lang = lang\n end\n \n def search(query, lang = @default_lang)\n data = self.class.get(method_url('Movie.search', 
lang, query))\n \n result_or_empty_array(data, Movie)\n end\n\n # Read more about the parameters that can be passed to this method here:\n # http://api.themoviedb.org/2.1/methods/Movie.browse\n def browse(params = {}, lang = @default_lang)\n data = self.class.get(method_url('Movie.browse', lang), :query => {:order => \"asc\", :order_by => \"title\"}.merge(params))\n \n result_or_empty_array(data, Movie)\n end\n \n def search_person(query, lang = @default_lang)\n data = self.class.get(method_url('Person.search', lang, query))\n \n result_or_empty_array(data, Person)\n end\n \n def imdb_lookup(imdb_id, lang = @default_lang)\n data = self.class.get(method_url('Movie.imdbLookup', lang, imdb_id)).parsed_response\n if data.class != Array || data.first == \"Nothing found.\"\n nil\n else\n Movie.new(data.first, self)\n end\n end\n \n def get_info(id, lang = @default_lang)\n data = self.class.get(method_url('Movie.getInfo', lang, id)).parsed_response\n Movie.new(data.first, self)\n end\n\n def get_file_info(file, lang=@default_lang)\n hash = TMDBParty::MovieHasher.compute_hash(file)\n bytesize = file.size\n data = self.class.get(method_url('Media.getInfo', lang, hash, bytesize))\n\n result_or_empty_array(data, Movie)\n end\n\n def get_person(id, lang = @default_lang)\n data = self.class.get(method_url('Person.getInfo', lang, id)).parsed_response\n Person.new(data.first, self)\n end\n \n def get_genres(lang = @default_lang)\n data = self.class.get(method_url('Genres.getList', lang)).parsed_response\n data[1..-1].collect { |genre| Genre.new(genre) } # Skips the first, see spec/fixtures/genres_results.json\n end\n \n private\n\n def result_or_empty_array(data, klass)\n data = data.parsed_response\n if data.class != Array || data.first == \"Nothing found.\"\n []\n else\n data.collect { |object| klass.new(object, self) }\n end\n end\n\n def method_url(method, lang, *args)\n url = [method, lang, self.class.format, @api_key]\n url += args.collect{ |a| URI.escape(a.to_s) }\n '/' + 
url.join('/')\n end\n end\nend\n", "lib/tmdb_party/cast_member.rb": "module TMDBParty\n class CastMember < Entity\n attr_reader :tmdb\n attributes :name, :url, :job, :department\n attributes :id, :type => Integer\n \n def character_name\n read_attribute('character')\n end\n \n def image_url\n read_attribute('profile')\n end\n \n def person\n tmdb.get_person(id)\n end\n end\nend\n", "lib/tmdb_party/category.rb": "module TMDBParty\n class Category < Entity\n attributes :name, :url\n end\nend\n", "lib/tmdb_party/entity.rb": "module TMDBParty\n class Entity\n include Attributes\n def initialize(values, tmdb=nil)\n @tmdb = tmdb\n self.attributes = values\n end\n\n def self.parse(data, tmdb=nil)\n return unless data\n if data.is_a?(Array)\n data.collect do |person|\n self.new(person, tmdb)\n end\n else\n [self.new(data, tmdb)]\n end\n end\n end\nend\n", "lib/tmdb_party/genre.rb": "module TMDBParty\n class Genre < Entity\n attributes :id, :type => Integer\n attributes :name, :url\n end\nend\n", "lib/tmdb_party/movie.rb": "module TMDBParty\n class Movie < Entity\n attr_reader :tmdb\n \n attributes :name, :overview, :id, :imdb_id, :movie_type, :url, :alternative_title, :translated, :certification\n attributes :released\n attributes :id, :popularity, :type => Integer\n attributes :score, :type => Float\n \n attributes :tagline, :lazy => :get_info!\n attributes :posters, :backdrops, :lazy => :get_info!, :type => Image\n attributes :homepage, :lazy => :get_info!\n attributes :trailer, :lazy => :get_info!\n attributes :runtime, :lazy => :get_info!, :type => Integer\n attributes :genres, :lazy => :get_info!, :type => Genre\n attributes :countries, :lazy => :get_info!, :type => Country\n attributes :studios, :lazy => :get_info!, :type => Studio\n \n alias_method :translated?, :translated\n \n def get_info!\n movie = tmdb.get_info(self.id)\n @attributes.merge!(movie.attributes) if movie\n @loaded = true\n end\n\n def cast\n # TODO: This needs refactoring\n 
CastMember.parse(read_or_load_attribute('cast', nil, :get_info!), tmdb)\n end\n\n def language\n read_attribute('language').downcase.to_sym\n end\n\n def last_modified_at\n # Date from TMDB is always in MST, but no timezone is present in date string\n Time.parse(read_attribute('last_modified_at') + ' MST')\n end\n\n def directors\n find_cast('Directing')\n end\n\n def actors\n find_cast('Actors')\n end\n\n def writers\n find_cast('Writing')\n end\n\n def producers\n find_cast('Production')\n end\n \n private\n \n def find_cast(type)\n return [] unless cast\n guys = cast.select{|c| c.department == type}\n end\n\n end\n \nend\n", "lib/tmdb_party/person.rb": "module TMDBParty\n class Person < Entity\n attr_reader :tmdb\n attributes :id, :popularity, :type => Integer\n attributes :score, :type => Float\n attributes :name, :url, :biography\n \n attributes :birthplace, :birthday, :lazy => :get_info!\n \n def biography\n # HTTParty does not parse the encoded hexadecimal properly. It does not consider 000F to be a hex, but 000f is\n # A bug has been submitted about this\n read_attribute('biography').gsub(\"\\\\n\", \"\\n\").gsub(/\\\\u([0-9A-F]{4})/) { [$1.hex].pack(\"U\") }\n end\n \n \n def get_info!\n person = tmdb.get_person(self.id)\n @attributes.merge!(person.attributes) if person\n @loaded = true\n end\n end\nend\n", "lib/tmdb_party/studio.rb": "module TMDBParty\n class Studio < Entity\n attributes :name, :url\n end\nend\n", "lib/tmdb_party/video.rb": "module TMDBParty\n class Video < Entity\n attr_reader :url\n \n def initialize(url)\n super\n @url = url\n end\n end\nend\n"}}
-{"repo": "rawwell/django", "pr_number": 1, "title": "0.90 bugfixes Need to check security parameters", "state": "open", "merged_at": null, "additions": 83, "deletions": 35, "files_changed": ["django/bin/compile-messages.py", "django/core/db/backends/postgresql.py", "django/core/handlers/modpython.py", "django/core/meta/__init__.py", "django/core/meta/fields.py"], "files_before": {"django/bin/compile-messages.py": "#!/usr/bin/env python\n\nimport optparse\nimport os\nimport sys\n\ntry:\n set\nexcept NameError:\n from sets import Set as set # For Python 2.3\n\n\ndef compile_messages(locale=None):\n basedirs = (os.path.join('conf', 'locale'), 'locale')\n if os.environ.get('DJANGO_SETTINGS_MODULE'):\n from django.conf import settings\n basedirs += settings.LOCALE_PATHS\n\n # Gather existing directories.\n basedirs = set(map(os.path.abspath, filter(os.path.isdir, basedirs)))\n\n if not basedirs:\n print \"This script should be run from the Django SVN tree or your project or app tree, or with the settings module specified.\"\n sys.exit(1)\n\n for basedir in basedirs:\n if locale:\n basedir = os.path.join(basedir, locale, 'LC_MESSAGES')\n compile_messages_in_dir(basedir)\n\ndef compile_messages_in_dir(basedir):\n for dirpath, dirnames, filenames in os.walk(basedir):\n for f in filenames:\n if f.endswith('.po'):\n sys.stderr.write('processing file %s in %s\\n' % (f, dirpath))\n pf = os.path.splitext(os.path.join(dirpath, f))[0]\n # Store the names of the .mo and .po files in an environment\n # variable, rather than doing a string replacement into the\n # command, so that we can take advantage of shell quoting, to\n # quote any malicious characters/escaping.\n # See http://cyberelk.net/tim/articles/cmdline/ar01s02.html\n os.environ['djangocompilemo'] = pf + '.mo'\n os.environ['djangocompilepo'] = pf + '.po'\n if sys.platform == 'win32': # Different shell-variable syntax\n cmd = 'msgfmt --check-format -o \"%djangocompilemo%\" \"%djangocompilepo%\"'\n else:\n cmd = 'msgfmt 
--check-format -o \"$djangocompilemo\" \"$djangocompilepo\"'\n os.system(cmd)\n\ndef main():\n parser = optparse.OptionParser()\n parser.add_option('-l', '--locale', dest='locale',\n help=\"The locale to process. Default is to process all.\")\n parser.add_option('--settings',\n help='Python path to settings module, e.g. \"myproject.settings\". If provided, all LOCALE_PATHS will be processed. If this isn\\'t provided, the DJANGO_SETTINGS_MODULE environment variable will be checked as well.')\n options, args = parser.parse_args()\n if len(args):\n parser.error(\"This program takes no arguments\")\n if options.settings:\n os.environ['DJANGO_SETTINGS_MODULE'] = options.settings\n compile_messages(options.locale)\n\nif __name__ == \"__main__\":\n main()\n", "django/core/handlers/modpython.py": "import os\nfrom pprint import pformat\n\nfrom django import http\nfrom django.core import signals\nfrom django.core.handlers.base import BaseHandler\nfrom django.dispatch import dispatcher\nfrom django.utils import datastructures\nfrom django.utils.encoding import force_unicode, smart_str\n\n# NOTE: do *not* import settings (or any module which eventually imports\n# settings) until after ModPythonHandler has been called; otherwise os.environ\n# won't be set up correctly (with respect to settings).\n\nclass ModPythonRequest(http.HttpRequest):\n def __init__(self, req):\n self._req = req\n self.path = force_unicode(req.uri)\n\n def __repr__(self):\n # Since this is called as part of error handling, we need to be very\n # robust against potentially malformed input.\n try:\n get = pformat(self.GET)\n except:\n get = ''\n try:\n post = pformat(self.POST)\n except:\n post = ''\n try:\n cookies = pformat(self.COOKIES)\n except:\n cookies = ''\n try:\n meta = pformat(self.META)\n except:\n meta = ''\n return smart_str(u'' %\n (self.path, unicode(get), unicode(post),\n unicode(cookies), unicode(meta)))\n\n def get_full_path(self):\n return '%s%s' % (self.path, self._req.args and ('?' 
+ self._req.args) or '')\n\n def is_secure(self):\n try:\n return self._req.is_https()\n except AttributeError:\n # mod_python < 3.2.10 doesn't have req.is_https().\n return self._req.subprocess_env.get('HTTPS', '').lower() in ('on', '1')\n\n def _load_post_and_files(self):\n \"Populates self._post and self._files\"\n if 'content-type' in self._req.headers_in and self._req.headers_in['content-type'].startswith('multipart'):\n self._post, self._files = http.parse_file_upload(self._req.headers_in, self.raw_post_data)\n else:\n self._post, self._files = http.QueryDict(self.raw_post_data, encoding=self._encoding), datastructures.MultiValueDict()\n\n def _get_request(self):\n if not hasattr(self, '_request'):\n self._request = datastructures.MergeDict(self.POST, self.GET)\n return self._request\n\n def _get_get(self):\n if not hasattr(self, '_get'):\n self._get = http.QueryDict(self._req.args, encoding=self._encoding)\n return self._get\n\n def _set_get(self, get):\n self._get = get\n\n def _get_post(self):\n if not hasattr(self, '_post'):\n self._load_post_and_files()\n return self._post\n\n def _set_post(self, post):\n self._post = post\n\n def _get_cookies(self):\n if not hasattr(self, '_cookies'):\n self._cookies = http.parse_cookie(self._req.headers_in.get('cookie', ''))\n return self._cookies\n\n def _set_cookies(self, cookies):\n self._cookies = cookies\n\n def _get_files(self):\n if not hasattr(self, '_files'):\n self._load_post_and_files()\n return self._files\n\n def _get_meta(self):\n \"Lazy loader that returns self.META dictionary\"\n if not hasattr(self, '_meta'):\n self._meta = {\n 'AUTH_TYPE': self._req.ap_auth_type,\n 'CONTENT_LENGTH': self._req.clength, # This may be wrong\n 'CONTENT_TYPE': self._req.content_type, # This may be wrong\n 'GATEWAY_INTERFACE': 'CGI/1.1',\n 'PATH_INFO': self._req.path_info,\n 'PATH_TRANSLATED': None, # Not supported\n 'QUERY_STRING': self._req.args,\n 'REMOTE_ADDR': self._req.connection.remote_ip,\n 'REMOTE_HOST': None, # 
DNS lookups not supported\n 'REMOTE_IDENT': self._req.connection.remote_logname,\n 'REMOTE_USER': self._req.user,\n 'REQUEST_METHOD': self._req.method,\n 'SCRIPT_NAME': None, # Not supported\n 'SERVER_NAME': self._req.server.server_hostname,\n 'SERVER_PORT': self._req.server.port,\n 'SERVER_PROTOCOL': self._req.protocol,\n 'SERVER_SOFTWARE': 'mod_python'\n }\n for key, value in self._req.headers_in.items():\n key = 'HTTP_' + key.upper().replace('-', '_')\n self._meta[key] = value\n return self._meta\n\n def _get_raw_post_data(self):\n try:\n return self._raw_post_data\n except AttributeError:\n self._raw_post_data = self._req.read()\n return self._raw_post_data\n\n def _get_method(self):\n return self.META['REQUEST_METHOD'].upper()\n\n GET = property(_get_get, _set_get)\n POST = property(_get_post, _set_post)\n COOKIES = property(_get_cookies, _set_cookies)\n FILES = property(_get_files)\n META = property(_get_meta)\n REQUEST = property(_get_request)\n raw_post_data = property(_get_raw_post_data)\n method = property(_get_method)\n\nclass ModPythonHandler(BaseHandler):\n request_class = ModPythonRequest\n\n def __call__(self, req):\n # mod_python fakes the environ, and thus doesn't process SetEnv. 
This fixes that\n os.environ.update(req.subprocess_env)\n\n # now that the environ works we can see the correct settings, so imports\n # that use settings now can work\n from django.conf import settings\n\n # if we need to set up middleware, now that settings works we can do it now.\n if self._request_middleware is None:\n self.load_middleware()\n\n dispatcher.send(signal=signals.request_started)\n try:\n try:\n request = self.request_class(req)\n except UnicodeDecodeError:\n response = http.HttpResponseBadRequest()\n else:\n response = self.get_response(request)\n\n # Apply response middleware\n for middleware_method in self._response_middleware:\n response = middleware_method(request, response)\n response = self.apply_response_fixes(request, response)\n finally:\n dispatcher.send(signal=signals.request_finished)\n\n # Convert our custom HttpResponse object back into the mod_python req.\n req.content_type = response['Content-Type']\n for key, value in response.items():\n if key != 'content-type':\n req.headers_out[str(key)] = str(value)\n for c in response.cookies.values():\n req.headers_out.add('Set-Cookie', c.output(header=''))\n req.status = response.status_code\n try:\n for chunk in response:\n req.write(chunk)\n finally:\n response.close()\n\n return 0 # mod_python.apache.OK\n\ndef handler(req):\n # mod_python hooks into this function.\n return ModPythonHandler()(req)\n"}, "files_after": {"django/bin/compile-messages.py": "#!/usr/bin/python\n\nimport os\nimport sys\nimport getopt\n\nbasedir = None\n\nif os.path.isdir(os.path.join('conf', 'locale')):\n basedir = os.path.abspath(os.path.join('conf', 'locale'))\nelif os.path.isdir('locale'):\n basedir = os.path.abspath('locale')\nelse:\n print \"this script should be run from the django svn tree or your project or app tree\"\n sys.exit(1)\n\nfor (dirpath, dirnames, filenames) in os.walk(basedir):\n for file in filenames:\n if file.endswith('.po'):\n sys.stderr.write('processing file %s in %s\\n' % (file, 
dirpath))\n pf = os.path.splitext(os.path.join(dirpath, file))[0]\n # Store the names of the .mo and .po files in an environment\n # variable, rather than doing a string replacement into the\n # command, so that we can take advantage of shell quoting, to\n # quote any malicious characters/escaping.\n # See http://cyberelk.net/tim/articles/cmdline/ar01s02.html\n os.environ['djangocompilemo'] = pf + '.mo'\n os.environ['djangocompilepo'] = pf + '.po'\n cmd = 'msgfmt -o \"$djangocompilemo\" \"$djangocompilepo\"'\n os.system(cmd)\n\n", "django/core/db/backends/postgresql.py": "\"\"\"\nPostgreSQL database backend for Django.\n\nRequires psycopg 1: http://initd.org/projects/psycopg1\n\"\"\"\n\nfrom django.core.db import base, typecasts\nimport psycopg as Database\n\nDatabaseError = Database.DatabaseError\n\ndef smart_basestring(s, charset):\n if isinstance(s, unicode):\n return s.encode(charset)\n return s\n\nclass UnicodeCursorWrapper(object):\n \"\"\"\n A thin wrapper around psycopg cursors that allows them to accept Unicode\n strings as params.\n\n This is necessary because psycopg doesn't apply any DB quoting to\n parameters that are Unicode strings. 
If a param is Unicode, this will\n convert it to a bytestring using DEFAULT_CHARSET before passing it to\n psycopg.\n \"\"\"\n def __init__(self, cursor, charset):\n self.cursor = cursor\n self.charset = charset\n\n def execute(self, sql, params=()):\n return self.cursor.execute(sql, [smart_basestring(p, self.charset) for p in params])\n\n def executemany(self, sql, param_list):\n new_param_list = [tuple([smart_basestring(p, self.charset) for p in params]) for params in param_list]\n return self.cursor.executemany(sql, new_param_list)\n\n def __getattr__(self, attr):\n if self.__dict__.has_key(attr):\n return self.__dict__[attr]\n else:\n return getattr(self.cursor, attr)\n\nclass DatabaseWrapper:\n def __init__(self):\n self.connection = None\n self.queries = []\n\n def cursor(self):\n from django.conf.settings import DATABASE_USER, DATABASE_NAME, DATABASE_HOST, DATABASE_PORT, DATABASE_PASSWORD, DEBUG, DEFAULT_CHARSET, TIME_ZONE\n if self.connection is None:\n if DATABASE_NAME == '':\n from django.core.exceptions import ImproperlyConfigured\n raise ImproperlyConfigured, \"You need to specify DATABASE_NAME in your Django settings file.\"\n conn_string = \"dbname=%s\" % DATABASE_NAME\n if DATABASE_USER:\n conn_string = \"user=%s %s\" % (DATABASE_USER, conn_string)\n if DATABASE_PASSWORD:\n conn_string += \" password='%s'\" % DATABASE_PASSWORD\n if DATABASE_HOST:\n conn_string += \" host=%s\" % DATABASE_HOST\n if DATABASE_PORT:\n conn_string += \" port=%s\" % DATABASE_PORT\n self.connection = Database.connect(conn_string)\n self.connection.set_isolation_level(1) # make transactions transparent to all cursors\n cursor = self.connection.cursor()\n cursor.execute(\"SET TIME ZONE %s\", [TIME_ZONE])\n cursor = UnicodeCursorWrapper(cursor, DEFAULT_CHARSET)\n if DEBUG:\n return base.CursorDebugWrapper(cursor, self)\n return cursor\n\n def commit(self):\n return self.connection.commit()\n\n def rollback(self):\n if self.connection:\n return self.connection.rollback()\n\n def 
close(self):\n if self.connection is not None:\n self.connection.close()\n self.connection = None\n\n def quote_name(self, name):\n if name.startswith('\"') and name.endswith('\"'):\n return name # Quoting once is enough.\n return '\"%s\"' % name\n\ndef dictfetchone(cursor):\n \"Returns a row from the cursor as a dict\"\n return cursor.dictfetchone()\n\ndef dictfetchmany(cursor, number):\n \"Returns a certain number of rows from a cursor as a dict\"\n return cursor.dictfetchmany(number)\n\ndef dictfetchall(cursor):\n \"Returns all rows from a cursor as a dict\"\n return cursor.dictfetchall()\n\ndef get_last_insert_id(cursor, table_name, pk_name):\n cursor.execute(\"SELECT CURRVAL('%s_%s_seq')\" % (table_name, pk_name))\n return cursor.fetchone()[0]\n\ndef get_date_extract_sql(lookup_type, table_name):\n # lookup_type is 'year', 'month', 'day'\n # http://www.postgresql.org/docs/8.0/static/functions-datetime.html#FUNCTIONS-DATETIME-EXTRACT\n return \"EXTRACT('%s' FROM %s)\" % (lookup_type, table_name)\n\ndef get_date_trunc_sql(lookup_type, field_name):\n # lookup_type is 'year', 'month', 'day'\n # http://www.postgresql.org/docs/8.0/static/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC\n return \"DATE_TRUNC('%s', %s)\" % (lookup_type, field_name)\n\ndef get_limit_offset_sql(limit, offset=None):\n sql = \"LIMIT %s\" % limit\n if offset and offset != 0:\n sql += \" OFFSET %s\" % offset\n return sql\n\ndef get_random_function_sql():\n return \"RANDOM()\"\n\ndef get_table_list(cursor):\n \"Returns a list of table names in the current database.\"\n cursor.execute(\"\"\"\n SELECT c.relname\n FROM pg_catalog.pg_class c\n LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace\n WHERE c.relkind IN ('r', 'v', '')\n AND n.nspname NOT IN ('pg_catalog', 'pg_toast')\n AND pg_catalog.pg_table_is_visible(c.oid)\"\"\")\n return [row[0] for row in cursor.fetchall()]\n\ndef get_relations(cursor, table_name):\n \"\"\"\n Returns a dictionary of {field_index: 
(field_index_other_table, other_table)}\n representing all relationships to the given table. Indexes are 0-based.\n \"\"\"\n cursor.execute(\"\"\"\n SELECT con.conkey, con.confkey, c2.relname\n FROM pg_constraint con, pg_class c1, pg_class c2\n WHERE c1.oid = con.conrelid\n AND c2.oid = con.confrelid\n AND c1.relname = %s\n AND con.contype = 'f'\"\"\", [table_name])\n relations = {}\n for row in cursor.fetchall():\n try:\n # row[0] and row[1] are like \"{2}\", so strip the curly braces.\n relations[int(row[0][1:-1]) - 1] = (int(row[1][1:-1]) - 1, row[2])\n except ValueError:\n continue\n return relations\n\n# Register these custom typecasts, because Django expects dates/times to be\n# in Python's native (standard-library) datetime/time format, whereas psycopg\n# use mx.DateTime by default.\ntry:\n Database.register_type(Database.new_type((1082,), \"DATE\", typecasts.typecast_date))\nexcept AttributeError:\n raise Exception, \"You appear to be using psycopg version 2, which isn't supported yet, because it's still in beta. Use psycopg version 1 instead: http://initd.org/projects/psycopg1\"\nDatabase.register_type(Database.new_type((1083,1266), \"TIME\", typecasts.typecast_time))\nDatabase.register_type(Database.new_type((1114,1184), \"TIMESTAMP\", typecasts.typecast_timestamp))\nDatabase.register_type(Database.new_type((16,), \"BOOLEAN\", typecasts.typecast_boolean))\n\nOPERATOR_MAPPING = {\n 'exact': '=',\n 'iexact': 'ILIKE',\n 'contains': 'LIKE',\n 'icontains': 'ILIKE',\n 'ne': '!=',\n 'gt': '>',\n 'gte': '>=',\n 'lt': '<',\n 'lte': '<=',\n 'startswith': 'LIKE',\n 'endswith': 'LIKE',\n 'istartswith': 'ILIKE',\n 'iendswith': 'ILIKE',\n}\n\n# This dictionary maps Field objects to their associated PostgreSQL column\n# types, as strings. 
Column-type strings can contain format strings; they'll\n# be interpolated against the values of Field.__dict__ before being output.\n# If a column type is set to None, it won't be included in the output.\nDATA_TYPES = {\n 'AutoField': 'serial',\n 'BooleanField': 'boolean',\n 'CharField': 'varchar(%(maxlength)s)',\n 'CommaSeparatedIntegerField': 'varchar(%(maxlength)s)',\n 'DateField': 'date',\n 'DateTimeField': 'timestamp with time zone',\n 'EmailField': 'varchar(75)',\n 'FileField': 'varchar(100)',\n 'FilePathField': 'varchar(100)',\n 'FloatField': 'numeric(%(max_digits)s, %(decimal_places)s)',\n 'ImageField': 'varchar(100)',\n 'IntegerField': 'integer',\n 'IPAddressField': 'inet',\n 'ManyToManyField': None,\n 'NullBooleanField': 'boolean',\n 'OneToOneField': 'integer',\n 'PhoneNumberField': 'varchar(20)',\n 'PositiveIntegerField': 'integer CHECK (%(column)s >= 0)',\n 'PositiveSmallIntegerField': 'smallint CHECK (%(column)s >= 0)',\n 'SlugField': 'varchar(50)',\n 'SmallIntegerField': 'smallint',\n 'TextField': 'text',\n 'TimeField': 'time',\n 'URLField': 'varchar(200)',\n 'USStateField': 'varchar(2)',\n}\n\n# Maps type codes to Django Field types.\nDATA_TYPES_REVERSE = {\n 16: 'BooleanField',\n 21: 'SmallIntegerField',\n 23: 'IntegerField',\n 25: 'TextField',\n 869: 'IPAddressField',\n 1043: 'CharField',\n 1082: 'DateField',\n 1083: 'TimeField',\n 1114: 'DateTimeField',\n 1184: 'DateTimeField',\n 1266: 'TimeField',\n 1700: 'FloatField',\n}\n", "django/core/handlers/modpython.py": "from django.core.handlers.base import BaseHandler\nfrom django.utils import datastructures, httpwrappers\nfrom pprint import pformat\nimport os\n\n# NOTE: do *not* import settings (or any module which eventually imports\n# settings) until after ModPythonHandler has been called; otherwise os.environ\n# won't be set up correctly (with respect to settings).\n\nclass ModPythonRequest(httpwrappers.HttpRequest):\n def __init__(self, req):\n self._req = req\n self.path = req.uri\n\n def 
__repr__(self):\n return '' % \\\n (self.path, pformat(self.GET), pformat(self.POST), pformat(self.COOKIES),\n pformat(self.META), pformat(self.user))\n\n def get_full_path(self):\n return '%s%s' % (self.path, self._req.args and ('?' + self._req.args) or '')\n\n def _load_post_and_files(self):\n \"Populates self._post and self._files\"\n if self._req.headers_in.has_key('content-type') and self._req.headers_in['content-type'].startswith('multipart'):\n self._post, self._files = httpwrappers.parse_file_upload(self._req.headers_in, self.raw_post_data)\n else:\n self._post, self._files = httpwrappers.QueryDict(self.raw_post_data), datastructures.MultiValueDict()\n\n def _get_request(self):\n if not hasattr(self, '_request'):\n self._request = datastructures.MergeDict(self.POST, self.GET)\n return self._request\n\n def _get_get(self):\n if not hasattr(self, '_get'):\n self._get = httpwrappers.QueryDict(self._req.args)\n return self._get\n\n def _set_get(self, get):\n self._get = get\n\n def _get_post(self):\n if not hasattr(self, '_post'):\n self._load_post_and_files()\n return self._post\n\n def _set_post(self, post):\n self._post = post\n\n def _get_cookies(self):\n if not hasattr(self, '_cookies'):\n self._cookies = httpwrappers.parse_cookie(self._req.headers_in.get('cookie', ''))\n return self._cookies\n\n def _set_cookies(self, cookies):\n self._cookies = cookies\n\n def _get_files(self):\n if not hasattr(self, '_files'):\n self._load_post_and_files()\n return self._files\n\n def _get_meta(self):\n \"Lazy loader that returns self.META dictionary\"\n if not hasattr(self, '_meta'):\n self._meta = {\n 'AUTH_TYPE': self._req.ap_auth_type,\n 'CONTENT_LENGTH': self._req.clength, # This may be wrong\n 'CONTENT_TYPE': self._req.content_type, # This may be wrong\n 'GATEWAY_INTERFACE': 'CGI/1.1',\n 'PATH_INFO': self._req.path_info,\n 'PATH_TRANSLATED': None, # Not supported\n 'QUERY_STRING': self._req.args,\n 'REMOTE_ADDR': self._req.connection.remote_ip,\n 'REMOTE_HOST': 
None, # DNS lookups not supported\n 'REMOTE_IDENT': self._req.connection.remote_logname,\n 'REMOTE_USER': self._req.user,\n 'REQUEST_METHOD': self._req.method,\n 'SCRIPT_NAME': None, # Not supported\n 'SERVER_NAME': self._req.server.server_hostname,\n 'SERVER_PORT': self._req.server.port,\n 'SERVER_PROTOCOL': self._req.protocol,\n 'SERVER_SOFTWARE': 'mod_python'\n }\n for key, value in self._req.headers_in.items():\n key = 'HTTP_' + key.upper().replace('-', '_')\n self._meta[key] = value\n return self._meta\n\n def _get_raw_post_data(self):\n try:\n return self._raw_post_data\n except AttributeError:\n self._raw_post_data = self._req.read()\n return self._raw_post_data\n\n def _get_user(self):\n if not hasattr(self, '_user'):\n from django.models.auth import users\n try:\n user_id = self.session[users.SESSION_KEY]\n if not user_id:\n raise ValueError\n self._user = users.get_object(pk=user_id)\n except (AttributeError, KeyError, ValueError, users.UserDoesNotExist):\n from django.parts.auth import anonymoususers\n self._user = anonymoususers.AnonymousUser()\n return self._user\n\n def _set_user(self, user):\n self._user = user\n\n GET = property(_get_get, _set_get)\n POST = property(_get_post, _set_post)\n COOKIES = property(_get_cookies, _set_cookies)\n FILES = property(_get_files)\n META = property(_get_meta)\n REQUEST = property(_get_request)\n raw_post_data = property(_get_raw_post_data)\n user = property(_get_user, _set_user)\n\nclass ModPythonHandler(BaseHandler):\n def __call__(self, req):\n # mod_python fakes the environ, and thus doesn't process SetEnv. 
This fixes that\n os.environ.update(req.subprocess_env)\n\n # now that the environ works we can see the correct settings, so imports\n # that use settings now can work\n from django.conf import settings\n from django.core import db\n\n # if we need to set up middleware, now that settings works we can do it now.\n if self._request_middleware is None:\n self.load_middleware()\n\n try:\n request = ModPythonRequest(req)\n response = self.get_response(req.uri, request)\n # Apply response middleware\n for middleware_method in self._response_middleware:\n response = middleware_method(request, response)\n finally:\n db.db.close()\n\n # Convert our custom HttpResponse object back into the mod_python req.\n populate_apache_request(response, req)\n return 0 # mod_python.apache.OK\n\ndef populate_apache_request(http_response, mod_python_req):\n \"Populates the mod_python request object with an HttpResponse\"\n from django.conf import settings\n mod_python_req.content_type = http_response['Content-Type']\n for key, value in http_response.headers.items():\n if key != 'Content-Type':\n mod_python_req.headers_out[key] = value\n for c in http_response.cookies.values():\n mod_python_req.headers_out.add('Set-Cookie', c.output(header=''))\n mod_python_req.status = http_response.status_code\n mod_python_req.write(http_response.get_content_as_string(settings.DEFAULT_CHARSET))\n\ndef handler(req):\n # mod_python hooks into this function.\n return ModPythonHandler()(req)\n", "django/core/meta/__init__.py": "from django.conf import settings\nfrom django.core import formfields, validators\nfrom django.core import db\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.core.meta.fields import *\nfrom django.utils.functional import curry\nfrom django.utils.text import capfirst\nimport copy, datetime, os, re, sys, types\n\n# Admin stages.\nADD, CHANGE, BOTH = 1, 2, 3\n\n# Size of each \"chunk\" for get_iterator calls.\n# Larger values are slightly faster at the expense of more 
storage space.\nGET_ITERATOR_CHUNK_SIZE = 100\n\n# Prefix (in Python path style) to location of models.\nMODEL_PREFIX = 'django.models'\n\n# Methods on models with the following prefix will be removed and\n# converted to module-level functions.\nMODEL_FUNCTIONS_PREFIX = '_module_'\n\n# Methods on models with the following prefix will be removed and\n# converted to manipulator methods.\nMANIPULATOR_FUNCTIONS_PREFIX = '_manipulator_'\n\nLOOKUP_SEPARATOR = '__'\n\n####################\n# HELPER FUNCTIONS #\n####################\n\n# Django currently supports two forms of ordering.\n# Form 1 (deprecated) example:\n# order_by=(('pub_date', 'DESC'), ('headline', 'ASC'), (None, 'RANDOM'))\n# Form 2 (new-style) example:\n# order_by=('-pub_date', 'headline', '?')\n# Form 1 is deprecated and will no longer be supported for Django's first\n# official release. The following code converts from Form 1 to Form 2.\n\nLEGACY_ORDERING_MAPPING = {'ASC': '_', 'DESC': '-_', 'RANDOM': '?'}\n\ndef handle_legacy_orderlist(order_list):\n if not order_list or isinstance(order_list[0], basestring):\n return order_list\n else:\n import warnings\n new_order_list = [LEGACY_ORDERING_MAPPING[j.upper()].replace('_', str(i)) for i, j in order_list]\n warnings.warn(\"%r ordering syntax is deprecated. 
Use %r instead.\" % (order_list, new_order_list), DeprecationWarning)\n return new_order_list\n\ndef orderfield2column(f, opts):\n try:\n return opts.get_field(f, False).column\n except FieldDoesNotExist:\n return f\n\ndef orderlist2sql(order_list, opts, prefix=''):\n if prefix.endswith('.'):\n prefix = db.db.quote_name(prefix[:-1]) + '.'\n output = []\n for f in handle_legacy_orderlist(order_list):\n if f.startswith('-'):\n output.append('%s%s DESC' % (prefix, db.db.quote_name(orderfield2column(f[1:], opts))))\n elif f == '?':\n output.append(db.get_random_function_sql())\n else:\n output.append('%s%s ASC' % (prefix, db.db.quote_name(orderfield2column(f, opts))))\n return ', '.join(output)\n\ndef get_module(app_label, module_name):\n return __import__('%s.%s.%s' % (MODEL_PREFIX, app_label, module_name), '', '', [''])\n\ndef get_app(app_label):\n return __import__('%s.%s' % (MODEL_PREFIX, app_label), '', '', [''])\n\n_installed_models_cache = None\ndef get_installed_models():\n \"\"\"\n Returns a list of installed \"models\" packages, such as foo.models,\n ellington.news.models, etc. 
This does NOT include django.models.\n \"\"\"\n global _installed_models_cache\n if _installed_models_cache is not None:\n return _installed_models_cache\n _installed_models_cache = []\n for a in settings.INSTALLED_APPS:\n try:\n _installed_models_cache.append(__import__(a + '.models', '', '', ['']))\n except ImportError:\n pass\n return _installed_models_cache\n\n_installed_modules_cache = None\ndef get_installed_model_modules(core_models=None):\n \"\"\"\n Returns a list of installed models, such as django.models.core,\n ellington.news.models.news, foo.models.bar, etc.\n \"\"\"\n global _installed_modules_cache\n if _installed_modules_cache is not None:\n return _installed_modules_cache\n _installed_modules_cache = []\n\n # django.models is a special case.\n for submodule in (core_models or []):\n _installed_modules_cache.append(__import__('django.models.%s' % submodule, '', '', ['']))\n for m in get_installed_models():\n for submodule in getattr(m, '__all__', []):\n mod = __import__('django.models.%s' % submodule, '', '', [''])\n try:\n mod._MODELS\n except AttributeError:\n pass # Skip model modules that don't actually have models in them.\n else:\n _installed_modules_cache.append(mod)\n return _installed_modules_cache\n\nclass LazyDate:\n \"\"\"\n Use in limit_choices_to to compare the field to dates calculated at run time\n instead of when the model is loaded. For example::\n\n ... 
limit_choices_to = {'date__gt' : meta.LazyDate(days=-3)} ...\n\n which will limit the choices to dates greater than three days ago.\n \"\"\"\n def __init__(self, **kwargs):\n self.delta = datetime.timedelta(**kwargs)\n\n def __str__(self):\n return str(self.__get_value__())\n\n def __repr__(self):\n return \"\" % self.delta\n\n def __get_value__(self):\n return datetime.datetime.now() + self.delta\n\n################\n# MAIN CLASSES #\n################\n\nclass FieldDoesNotExist(Exception):\n pass\n\nclass BadKeywordArguments(Exception):\n pass\n\nclass Options:\n def __init__(self, module_name='', verbose_name='', verbose_name_plural='', db_table='',\n fields=None, ordering=None, unique_together=None, admin=None, has_related_links=False,\n where_constraints=None, object_name=None, app_label=None,\n exceptions=None, permissions=None, get_latest_by=None,\n order_with_respect_to=None, module_constants=None):\n\n # Save the original function args, for use by copy(). Note that we're\n # NOT using copy.deepcopy(), because that would create a new copy of\n # everything in memory, and it's better to conserve memory. Of course,\n # this comes with the important gotcha that changing any attribute of\n # this object will change its value in self._orig_init_args, so we\n # need to be careful not to do that. 
In practice, we can pull this off\n # because Options are generally read-only objects, and __init__() is\n # the only place where its attributes are manipulated.\n\n # locals() is used purely for convenience, so we don't have to do\n # something verbose like this:\n # self._orig_init_args = {\n # 'module_name': module_name,\n # 'verbose_name': verbose_name,\n # ...\n # }\n self._orig_init_args = locals()\n del self._orig_init_args['self'] # because we don't care about it.\n\n # Move many-to-many related fields from self.fields into self.many_to_many.\n self.fields, self.many_to_many = [], []\n for field in (fields or []):\n if field.rel and isinstance(field.rel, ManyToMany):\n self.many_to_many.append(field)\n else:\n self.fields.append(field)\n self.module_name, self.verbose_name = module_name, verbose_name\n self.verbose_name_plural = verbose_name_plural or verbose_name + 's'\n self.db_table, self.has_related_links = db_table, has_related_links\n self.ordering = ordering or []\n self.unique_together = unique_together or []\n self.where_constraints = where_constraints or []\n self.exceptions = exceptions or []\n self.permissions = permissions or []\n self.object_name, self.app_label = object_name, app_label\n self.get_latest_by = get_latest_by\n if order_with_respect_to:\n self.order_with_respect_to = self.get_field(order_with_respect_to)\n self.ordering = ('_order',)\n else:\n self.order_with_respect_to = None\n self.module_constants = module_constants or {}\n self.admin = admin\n\n # Calculate one_to_one_field.\n self.one_to_one_field = None\n for f in self.fields:\n if isinstance(f.rel, OneToOne):\n self.one_to_one_field = f\n break\n # Cache the primary-key field.\n self.pk = None\n for f in self.fields:\n if f.primary_key:\n self.pk = f\n break\n # If a primary_key field hasn't been specified, add an\n # auto-incrementing primary-key ID field automatically.\n if self.pk is None:\n self.fields.insert(0, AutoField(name='id', verbose_name='ID', 
primary_key=True))\n self.pk = self.fields[0]\n # Cache whether this has an AutoField.\n self.has_auto_field = False\n for f in self.fields:\n is_auto = isinstance(f, AutoField)\n if is_auto and self.has_auto_field:\n raise AssertionError, \"A model can't have more than one AutoField.\"\n elif is_auto:\n self.has_auto_field = True\n\n def __repr__(self):\n return '' % self.module_name\n\n def copy(self, **kwargs):\n args = self._orig_init_args.copy()\n args.update(kwargs)\n return self.__class__(**args)\n\n def get_model_module(self):\n return get_module(self.app_label, self.module_name)\n\n def get_content_type_id(self):\n \"Returns the content-type ID for this object type.\"\n if not hasattr(self, '_content_type_id'):\n mod = get_module('core', 'contenttypes')\n self._content_type_id = mod.get_object(python_module_name__exact=self.module_name, package__label__exact=self.app_label).id\n return self._content_type_id\n\n def get_field(self, name, many_to_many=True):\n \"\"\"\n Returns the requested field by name. Raises FieldDoesNotExist on error.\n \"\"\"\n to_search = many_to_many and (self.fields + self.many_to_many) or self.fields\n for f in to_search:\n if f.name == name:\n return f\n raise FieldDoesNotExist, \"name=%s\" % name\n\n def get_order_sql(self, table_prefix=''):\n \"Returns the full 'ORDER BY' clause for this object, according to self.ordering.\"\n if not self.ordering: return ''\n pre = table_prefix and (table_prefix + '.') or ''\n return 'ORDER BY ' + orderlist2sql(self.ordering, self, pre)\n\n def get_add_permission(self):\n return 'add_%s' % self.object_name.lower()\n\n def get_change_permission(self):\n return 'change_%s' % self.object_name.lower()\n\n def get_delete_permission(self):\n return 'delete_%s' % self.object_name.lower()\n\n def get_rel_object_method_name(self, rel_opts, rel_field):\n # This method encapsulates the logic that decides what name to give a\n # method that retrieves related many-to-one objects. 
Usually it just\n # uses the lower-cased object_name, but if the related object is in\n # another app, its app_label is appended.\n #\n # Examples:\n #\n # # Normal case -- a related object in the same app.\n # # This method returns \"choice\".\n # Poll.get_choice_list()\n #\n # # A related object in a different app.\n # # This method returns \"lcom_bestofaward\".\n # Place.get_lcom_bestofaward_list() # \"lcom_bestofaward\"\n rel_obj_name = rel_field.rel.related_name or rel_opts.object_name.lower()\n if self.app_label != rel_opts.app_label:\n rel_obj_name = '%s_%s' % (rel_opts.app_label, rel_obj_name)\n return rel_obj_name\n\n def get_all_related_objects(self):\n try: # Try the cache first.\n return self._all_related_objects\n except AttributeError:\n module_list = get_installed_model_modules()\n rel_objs = []\n for mod in module_list:\n for klass in mod._MODELS:\n for f in klass._meta.fields:\n if f.rel and self == f.rel.to:\n rel_objs.append((klass._meta, f))\n if self.has_related_links:\n # Manually add RelatedLink objects, which are a special case.\n relatedlinks = get_module('relatedlinks', 'relatedlinks')\n # Note that the copy() is very important -- otherwise any\n # subsequently loaded object with related links will override this\n # relationship we're adding.\n link_field = copy.copy(relatedlinks.RelatedLink._meta.get_field('object_id'))\n link_field.rel = ManyToOne(self.get_model_module().Klass, 'id',\n num_in_admin=3, min_num_in_admin=3, edit_inline=TABULAR,\n lookup_overrides={\n 'content_type__package__label__exact': self.app_label,\n 'content_type__python_module_name__exact': self.module_name,\n })\n rel_objs.append((relatedlinks.RelatedLink._meta, link_field))\n self._all_related_objects = rel_objs\n return rel_objs\n\n def get_inline_related_objects(self):\n return [(a, b) for a, b in self.get_all_related_objects() if b.rel.edit_inline]\n\n def get_all_related_many_to_many_objects(self):\n module_list = get_installed_model_modules()\n rel_objs = 
[]\n for mod in module_list:\n for klass in mod._MODELS:\n try:\n for f in klass._meta.many_to_many:\n if f.rel and self == f.rel.to:\n rel_objs.append((klass._meta, f))\n raise StopIteration\n except StopIteration:\n continue\n return rel_objs\n\n def get_ordered_objects(self):\n \"Returns a list of Options objects that are ordered with respect to this object.\"\n if not hasattr(self, '_ordered_objects'):\n objects = []\n for klass in get_app(self.app_label)._MODELS:\n opts = klass._meta\n if opts.order_with_respect_to and opts.order_with_respect_to.rel \\\n and self == opts.order_with_respect_to.rel.to:\n objects.append(opts)\n self._ordered_objects = objects\n return self._ordered_objects\n\n def has_field_type(self, field_type):\n \"\"\"\n Returns True if this object's admin form has at least one of the given\n field_type (e.g. FileField).\n \"\"\"\n if not hasattr(self, '_field_types'):\n self._field_types = {}\n if not self._field_types.has_key(field_type):\n try:\n # First check self.fields.\n for f in self.fields:\n if isinstance(f, field_type):\n raise StopIteration\n # Failing that, check related fields.\n for rel_obj, rel_field in self.get_inline_related_objects():\n for f in rel_obj.fields:\n if isinstance(f, field_type):\n raise StopIteration\n except StopIteration:\n self._field_types[field_type] = True\n else:\n self._field_types[field_type] = False\n return self._field_types[field_type]\n\ndef _reassign_globals(function_dict, extra_globals, namespace):\n new_functions = {}\n for k, v in function_dict.items():\n # Get the code object.\n code = v.func_code\n # Recreate the function, but give it access to extra_globals and the\n # given namespace's globals, too.\n new_globals = {'__builtins__': __builtins__, 'db': db.db, 'datetime': datetime}\n new_globals.update(extra_globals.__dict__)\n func = types.FunctionType(code, globals=new_globals, name=k, argdefs=v.func_defaults)\n func.__dict__.update(v.__dict__)\n setattr(namespace, k, func)\n # For all of 
the custom functions that have been added so far, give
        # them access to the new function we've just created.
        for new_k, new_v in new_functions.items():
            new_v.func_globals[k] = func
        new_functions[k] = func

# Calculate the module_name using a poor-man's pluralization.
get_module_name = lambda class_name: class_name.lower() + 's'

# Calculate the verbose_name by converting from InitialCaps to "lowercase with spaces".
get_verbose_name = lambda class_name: re.sub('([A-Z])', ' \\1', class_name).lower().strip()

class ModelBase(type):
    "Metaclass for all models"
    def __new__(cls, name, bases, attrs):
        # Builds the model class AND, as a side effect, synthesizes a whole
        # module (get_object(), get_list(), manipulators, exceptions, ...)
        # which is registered in sys.modules and on the app package.

        # If this isn't a subclass of Model, don't do anything special.
        if not bases:
            return type.__new__(cls, name, bases, attrs)

        try:
            meta_attrs = attrs.pop('META').__dict__
            del meta_attrs['__module__']
            del meta_attrs['__doc__']
        except KeyError:
            meta_attrs = {}

        # Gather all attributes that are Field instances.
        # (Mutating attrs while iterating is safe here: Python 2's items()
        # returns a list snapshot.)
        fields = []
        for obj_name, obj in attrs.items():
            if isinstance(obj, Field):
                obj.set_name(obj_name)
                fields.append(obj)
                del attrs[obj_name]

        # Sort the fields in the order that they were created. The
        # "creation_counter" is needed because metaclasses don't preserve the
        # attribute order.
        fields.sort(lambda x, y: x.creation_counter - y.creation_counter)

        # If this model is a subclass of another model, create an Options
        # object by first copying the base class's _meta and then updating it
        # with the overrides from this class.
        replaces_module = None
        if bases[0] != Model:
            # Inherit any base-class fields not redefined or explicitly removed.
            field_names = [f.name for f in fields]
            remove_fields = meta_attrs.pop('remove_fields', [])
            for f in bases[0]._meta._orig_init_args['fields']:
                if f.name not in field_names and f.name not in remove_fields:
                    fields.insert(0, f)
            if meta_attrs.has_key('replaces_module'):
                # Set the replaces_module variable for now. We can't actually
                # do anything with it yet, because the module hasn't yet been
                # created.
                replaces_module = meta_attrs.pop('replaces_module').split('.')
            # Pass any Options overrides to the base's Options instance, and
            # simultaneously remove them from attrs. When this is done, attrs
            # will be a dictionary of custom methods, plus __module__.
            meta_overrides = {'fields': fields, 'module_name': get_module_name(name), 'verbose_name': get_verbose_name(name)}
            for k, v in meta_attrs.items():
                if not callable(v) and k != '__module__':
                    meta_overrides[k] = meta_attrs.pop(k)
            opts = bases[0]._meta.copy(**meta_overrides)
            opts.object_name = name
            del meta_overrides
        else:
            opts = Options(
                module_name = meta_attrs.pop('module_name', get_module_name(name)),
                # If the verbose_name wasn't given, use the class name,
                # converted from InitialCaps to "lowercase with spaces".
                verbose_name = meta_attrs.pop('verbose_name', get_verbose_name(name)),
                verbose_name_plural = meta_attrs.pop('verbose_name_plural', ''),
                db_table = meta_attrs.pop('db_table', ''),
                fields = fields,
                ordering = meta_attrs.pop('ordering', None),
                unique_together = meta_attrs.pop('unique_together', None),
                admin = meta_attrs.pop('admin', None),
                has_related_links = meta_attrs.pop('has_related_links', False),
                where_constraints = meta_attrs.pop('where_constraints', None),
                object_name = name,
                app_label = meta_attrs.pop('app_label', None),
                exceptions = meta_attrs.pop('exceptions', None),
                permissions = meta_attrs.pop('permissions', None),
                get_latest_by = meta_attrs.pop('get_latest_by', None),
                order_with_respect_to = meta_attrs.pop('order_with_respect_to', None),
                module_constants = meta_attrs.pop('module_constants', None),
            )

        # Anything left in meta_attrs at this point is a typo/unknown option.
        if meta_attrs != {}:
            raise TypeError, "'class META' got invalid attribute(s): %s" % ','.join(meta_attrs.keys())

        # Dynamically create the module that will contain this class and its
        # associated helper functions.
        if replaces_module is not None:
            new_mod = get_module(*replaces_module)
        else:
            new_mod = types.ModuleType(opts.module_name)

        # Collect any/all custom class methods and module functions, and move
        # them to a temporary holding variable. We'll deal with them later.
        if replaces_module is not None:
            # Initialize these values to the base class' custom_methods and
            # custom_functions.
            custom_methods = dict([(k, v) for k, v in new_mod.Klass.__dict__.items() if hasattr(v, 'custom')])
            custom_functions = dict([(k, v) for k, v in new_mod.__dict__.items() if hasattr(v, 'custom')])
        else:
            custom_methods, custom_functions = {}, {}
        manipulator_methods = {}
        for k, v in attrs.items():
            if k in ('__module__', '__init__', '_overrides', '__doc__'):
                continue # Skip the important stuff.
            assert callable(v), "%r is an invalid model parameter." % k
            # Give the function a function attribute "custom" to designate that
            # it's a custom function/method.
            v.custom = True
            # Routing: "_module_*" -> module function, "_manipulator_*" ->
            # manipulator method, everything else -> instance method.
            # (Exact prefixes come from MODEL_FUNCTIONS_PREFIX /
            # MANIPULATOR_FUNCTIONS_PREFIX, defined earlier in this file.)
            if k.startswith(MODEL_FUNCTIONS_PREFIX):
                custom_functions[k[len(MODEL_FUNCTIONS_PREFIX):]] = v
            elif k.startswith(MANIPULATOR_FUNCTIONS_PREFIX):
                manipulator_methods[k[len(MANIPULATOR_FUNCTIONS_PREFIX):]] = v
            else:
                custom_methods[k] = v
            del attrs[k]

        # Create the module-level ObjectDoesNotExist exception.
        dne_exc_name = '%sDoesNotExist' % name
        does_not_exist_exception = types.ClassType(dne_exc_name, (ObjectDoesNotExist,), {})
        # Explicitly set its __module__ because it will initially (incorrectly)
        # be set to the module the code is being executed in.
        does_not_exist_exception.__module__ = MODEL_PREFIX + '.' + opts.module_name
        setattr(new_mod, dne_exc_name, does_not_exist_exception)

        # Create other exceptions.
        for exception_name in opts.exceptions:
            exc = types.ClassType(exception_name, (Exception,), {})
            exc.__module__ = MODEL_PREFIX + '.' + opts.module_name # Set this explicitly, as above.
            setattr(new_mod, exception_name, exc)

        # Create any module-level constants, if applicable.
        for k, v in opts.module_constants.items():
            setattr(new_mod, k, v)

        # Create the default class methods.
        attrs['__init__'] = curry(method_init, opts)
        attrs['__eq__'] = curry(method_eq, opts)
        attrs['save'] = curry(method_save, opts)
        attrs['save'].alters_data = True
        attrs['delete'] = curry(method_delete, opts)
        attrs['delete'].alters_data = True

        if opts.order_with_respect_to:
            attrs['get_next_in_order'] = curry(method_get_next_in_order, opts, opts.order_with_respect_to)
            attrs['get_previous_in_order'] = curry(method_get_previous_in_order, opts, opts.order_with_respect_to)

        for f in opts.fields:
            # If the object has a relationship to itself, as designated by
            # RECURSIVE_RELATIONSHIP_CONSTANT, create that relationship formally.
            if f.rel and f.rel.to == RECURSIVE_RELATIONSHIP_CONSTANT:
                f.rel.to = opts
                f.name = f.name or (f.rel.to.object_name.lower() + '_' + f.rel.to.pk.name)
                f.verbose_name = f.verbose_name or f.rel.to.verbose_name
                f.rel.field_name = f.rel.field_name or f.rel.to.pk.name
            # Add "get_thingie" methods for many-to-one related objects.
            # EXAMPLES: Choice.get_poll(), Story.get_dateline()
            if isinstance(f.rel, ManyToOne):
                func = curry(method_get_many_to_one, f)
                func.__doc__ = "Returns the associated `%s.%s` object." % (f.rel.to.app_label, f.rel.to.module_name)
                attrs['get_%s' % f.name] = func

        for f in opts.many_to_many:
            # Add "get_thingie" methods for many-to-many related objects.
            # EXAMPLES: Poll.get_site_list(), Story.get_byline_list()
            func = curry(method_get_many_to_many, f)
            func.__doc__ = "Returns a list of associated `%s.%s` objects." % (f.rel.to.app_label, f.rel.to.module_name)
            attrs['get_%s_list' % f.rel.singular] = func
            # Add "set_thingie" methods for many-to-many related objects.
            # EXAMPLES: Poll.set_sites(), Story.set_bylines()
            func = curry(method_set_many_to_many, f)
            func.__doc__ = "Resets this object's `%s.%s` list to the given list of IDs. Note that it doesn't check whether the given IDs are valid." % (f.rel.to.app_label, f.rel.to.module_name)
            func.alters_data = True
            attrs['set_%s' % f.name] = func

        # Create the class, because we need it to use in currying.
        new_class = type.__new__(cls, name, bases, attrs)

        # Give the class a docstring -- its definition.
        if new_class.__doc__ is None:
            new_class.__doc__ = "%s.%s(%s)" % (opts.module_name, name, ", ".join([f.name for f in opts.fields]))

        # Create the standard, module-level API helper functions such
        # as get_object() and get_list().
        new_mod.get_object = curry(function_get_object, opts, new_class, does_not_exist_exception)
        new_mod.get_object.__doc__ = "Returns the %s object matching the given parameters." % name

        new_mod.get_list = curry(function_get_list, opts, new_class)
        new_mod.get_list.__doc__ = "Returns a list of %s objects matching the given parameters." % name

        new_mod.get_iterator = curry(function_get_iterator, opts, new_class)
        new_mod.get_iterator.__doc__ = "Returns an iterator of %s objects matching the given parameters." % name

        new_mod.get_values = curry(function_get_values, opts, new_class)
        new_mod.get_values.__doc__ = "Returns a list of dictionaries matching the given parameters."

        new_mod.get_values_iterator = curry(function_get_values_iterator, opts, new_class)
        new_mod.get_values_iterator.__doc__ = "Returns an iterator of dictionaries matching the given parameters."

        new_mod.get_count = curry(function_get_count, opts)
        new_mod.get_count.__doc__ = "Returns the number of %s objects matching the given parameters." % name

        new_mod._get_sql_clause = curry(function_get_sql_clause, opts)

        new_mod.get_in_bulk = curry(function_get_in_bulk, opts, new_class)
        new_mod.get_in_bulk.__doc__ = "Returns a dictionary of ID -> %s for the %s objects with IDs in the given id_list." % (name, name)

        if opts.get_latest_by:
            new_mod.get_latest = curry(function_get_latest, opts, new_class, does_not_exist_exception)

        for f in opts.fields:
            if f.choices:
                # Add "get_thingie_display" method to get human-readable value.
                func = curry(method_get_display_value, f)
                setattr(new_class, 'get_%s_display' % f.name, func)
            if isinstance(f, DateField) or isinstance(f, DateTimeField):
                # Add "get_next_by_thingie" and "get_previous_by_thingie" methods
                # for all DateFields and DateTimeFields that cannot be null.
                # EXAMPLES: Poll.get_next_by_pub_date(), Poll.get_previous_by_pub_date()
                if not f.null:
                    setattr(new_class, 'get_next_by_%s' % f.name, curry(method_get_next_or_previous, new_mod.get_object, opts, f, True))
                    setattr(new_class, 'get_previous_by_%s' % f.name, curry(method_get_next_or_previous, new_mod.get_object, opts, f, False))
                # Add "get_thingie_list" for all DateFields and DateTimeFields.
                # EXAMPLE: polls.get_pub_date_list()
                func = curry(function_get_date_list, opts, f)
                func.__doc__ = "Returns a list of days, months or years (as datetime.datetime objects) in which %s objects are available. The first parameter ('kind') must be one of 'year', 'month' or 'day'." % name
                setattr(new_mod, 'get_%s_list' % f.name, func)

            elif isinstance(f, FileField):
                setattr(new_class, 'get_%s_filename' % f.name, curry(method_get_file_filename, f))
                setattr(new_class, 'get_%s_url' % f.name, curry(method_get_file_url, f))
                setattr(new_class, 'get_%s_size' % f.name, curry(method_get_file_size, f))
                func = curry(method_save_file, f)
                func.alters_data = True
                setattr(new_class, 'save_%s_file' % f.name, func)
                if isinstance(f, ImageField):
                    # Add get_BLAH_width and get_BLAH_height methods, but only
                    # if the image field doesn't have width and height cache
                    # fields.
                    if not f.width_field:
                        setattr(new_class, 'get_%s_width' % f.name, curry(method_get_image_width, f))
                    if not f.height_field:
                        setattr(new_class, 'get_%s_height' % f.name, curry(method_get_image_height, f))

        # Add the class itself to the new module we've created.
        new_mod.__dict__[name] = new_class

        # Add "Klass" -- a shortcut reference to the class.
        new_mod.__dict__['Klass'] = new_class

        # Add the Manipulators.
        new_mod.__dict__['AddManipulator'] = get_manipulator(opts, new_class, manipulator_methods, add=True)
        new_mod.__dict__['ChangeManipulator'] = get_manipulator(opts, new_class, manipulator_methods, change=True)

        # Now that we have references to new_mod and new_class, we can add
        # any/all extra class methods to the new class. Note that we could
        # have just left the extra methods in attrs (above), but that would
        # have meant that any code within the extra methods would *not* have
        # access to module-level globals, such as get_list(), db, etc.
        # In order to give these methods access to those globals, we have to
        # deconstruct the method getting its raw "code" object, then recreating
        # the function with a new "globals" dictionary.
        #
        # To complicate matters more, because each method is manually assigned
        # a "globals" value, that "globals" value does NOT include the methods
        # that haven't been created yet. For instance, if there are two custom
        # methods, foo() and bar(), and foo() is created first, it won't have
        # bar() within its globals(). This is a problem because sometimes
        # custom methods/functions refer to other custom methods/functions. To
        # solve this problem, we keep track of the new functions created (in
        # the new_functions variable) and manually append each new function to
        # the func_globals() of all previously-created functions. So, by the
        # end of the loop, all functions will "know" about all the other
        # functions.
        _reassign_globals(custom_methods, new_mod, new_class)
        _reassign_globals(custom_functions, new_mod, new_mod)
        _reassign_globals(manipulator_methods, new_mod, new_mod.__dict__['AddManipulator'])
        _reassign_globals(manipulator_methods, new_mod, new_mod.__dict__['ChangeManipulator'])

        if hasattr(new_class, 'get_absolute_url'):
            new_class.get_absolute_url = curry(get_absolute_url, opts, new_class.get_absolute_url)

        # Get a reference to the module the class is in, and dynamically add
        # the new module to it.
        app_package = sys.modules.get(new_class.__module__)
        if replaces_module is not None:
            app_label = replaces_module[0]
        else:
            app_package.__dict__[opts.module_name] = new_mod
            app_label = app_package.__name__[app_package.__name__.rfind('.')+1:]

        # Populate the _MODELS member on the module the class is in.
        # Example: django.models.polls will have a _MODELS member that will
        # contain this list:
        # [<class 'Poll'>, <class 'Choice'>]  (one entry per model class)
        # Don't do this if replaces_module is set.
        # NOTE(review): despite the comment above, this append is unconditional
        # -- it runs even when replaces_module is set. Confirm whether that's
        # intentional or the guard was lost.
        app_package.__dict__.setdefault('_MODELS', []).append(new_class)

        # Cache the app label.
        opts.app_label = app_label

        # If the db_table wasn't provided, use the app_label + module_name.
        if not opts.db_table:
            opts.db_table = "%s_%s" % (app_label, opts.module_name)
        new_class._meta = opts

        # Set the __file__ attribute to the __file__ attribute of its package,
        # because they're technically from the same file. Note: if we didn't
        # set this, sys.modules would think this module was built-in.
        try:
            new_mod.__file__ = app_package.__file__
        except AttributeError:
            # 'module' object has no attribute '__file__', which means the
            # class was probably being entered via the interactive interpreter.
            pass

        # Add the module's entry to sys.modules -- for instance,
        # "django.models.polls.polls". Note that "django.models.polls" has already
        # been added automatically.
        sys.modules.setdefault('%s.%s.%s' % (MODEL_PREFIX, app_label, opts.module_name), new_mod)

        # If this module replaces another one, get a reference to the other
        # module's parent, and replace the other module with the one we've just
        # created.
        if replaces_module is not None:
            old_app = get_app(replaces_module[0])
            setattr(old_app, replaces_module[1], new_mod)
            for i, model in enumerate(old_app._MODELS):
                if model._meta.module_name == replaces_module[1]:
                    # Replace the appropriate member of the old app's _MODELS
                    # data structure.
                    old_app._MODELS[i] = new_class
                    # Replace all relationships to the old class with
                    # relationships to the new one.
                    for rel_opts, rel_field in model._meta.get_all_related_objects():
                        rel_field.rel.to = opts
                    for rel_opts, rel_field in model._meta.get_all_related_many_to_many_objects():
                        rel_field.rel.to = opts
                    break

        return new_class

class Model:
    # All user models inherit from this; ModelBase does the heavy lifting.
    __metaclass__ = ModelBase

    def __repr__(self):
        return '<%s object>' % self.__class__.__name__

############################################
# HELPER FUNCTIONS (CURRIED MODEL METHODS) #
############################################

# CORE METHODS #############################

def method_init(opts, self, *args, **kwargs):
    # Curried __init__ for model instances: positional args map to opts.fields
    # in order; keyword args are matched by field (attname), with special
    # handling for many-to-one relations (accepts either a related instance
    # under f.name or a raw ID under f.attname).
    if kwargs:
        for f in opts.fields:
            if isinstance(f.rel, ManyToOne):
                try:
                    # Assume object instance was passed in.
                    rel_obj = kwargs.pop(f.name)
                except KeyError:
                    try:
                        # Object instance wasn't passed in -- must be an ID.
                        val = kwargs.pop(f.attname)
                    except KeyError:
                        val = f.get_default()
                else:
                    # Special case: You can pass in "None" for related objects if it's allowed.
                    if rel_obj is None and f.null:
                        val = None
                    else:
                        try:
                            val = getattr(rel_obj, f.rel.field_name)
                        except AttributeError:
                            raise TypeError, "Invalid value: %r should be a %s instance, not a %s" % (f.name, f.rel.to, type(rel_obj))
                setattr(self, f.attname, val)
else:\n val = kwargs.pop(f.attname, f.get_default())\n setattr(self, f.attname, val)\n if kwargs:\n raise TypeError, \"'%s' is an invalid keyword argument for this function\" % kwargs.keys()[0]\n for i, arg in enumerate(args):\n setattr(self, opts.fields[i].attname, arg)\n\ndef method_eq(opts, self, other):\n return isinstance(other, self.__class__) and getattr(self, opts.pk.attname) == getattr(other, opts.pk.attname)\n\ndef method_save(opts, self):\n # Run any pre-save hooks.\n if hasattr(self, '_pre_save'):\n self._pre_save()\n non_pks = [f for f in opts.fields if not f.primary_key]\n cursor = db.db.cursor()\n\n # First, try an UPDATE. If that doesn't update anything, do an INSERT.\n pk_val = getattr(self, opts.pk.attname)\n pk_set = bool(pk_val)\n record_exists = True\n if pk_set:\n # Determine whether a record with the primary key already exists.\n cursor.execute(\"SELECT 1 FROM %s WHERE %s=%%s LIMIT 1\" % \\\n (db.db.quote_name(opts.db_table), db.db.quote_name(opts.pk.column)), [pk_val])\n # If it does already exist, do an UPDATE.\n if cursor.fetchone():\n db_values = [f.get_db_prep_save(f.pre_save(getattr(self, f.attname), False)) for f in non_pks]\n cursor.execute(\"UPDATE %s SET %s WHERE %s=%%s\" % \\\n (db.db.quote_name(opts.db_table),\n ','.join(['%s=%%s' % db.db.quote_name(f.column) for f in non_pks]),\n db.db.quote_name(opts.pk.attname)),\n db_values + [pk_val])\n else:\n record_exists = False\n if not pk_set or not record_exists:\n field_names = [db.db.quote_name(f.column) for f in opts.fields if not isinstance(f, AutoField)]\n placeholders = ['%s'] * len(field_names)\n db_values = [f.get_db_prep_save(f.pre_save(getattr(self, f.attname), True)) for f in opts.fields if not isinstance(f, AutoField)]\n if opts.order_with_respect_to:\n field_names.append(db.db.quote_name('_order'))\n # TODO: This assumes the database supports subqueries.\n placeholders.append('(SELECT COUNT(*) FROM %s WHERE %s = %%s)' % \\\n (db.db.quote_name(opts.db_table), 
db.db.quote_name(opts.order_with_respect_to.column)))\n db_values.append(getattr(self, opts.order_with_respect_to.attname))\n cursor.execute(\"INSERT INTO %s (%s) VALUES (%s)\" % \\\n (db.db.quote_name(opts.db_table), ','.join(field_names),\n ','.join(placeholders)), db_values)\n if opts.has_auto_field:\n setattr(self, opts.pk.attname, db.get_last_insert_id(cursor, opts.db_table, opts.pk.column))\n db.db.commit()\n # Run any post-save hooks.\n if hasattr(self, '_post_save'):\n self._post_save()\n\ndef method_delete(opts, self):\n assert getattr(self, opts.pk.attname) is not None, \"%r can't be deleted because it doesn't have an ID.\"\n # Run any pre-delete hooks.\n if hasattr(self, '_pre_delete'):\n self._pre_delete()\n cursor = db.db.cursor()\n for rel_opts, rel_field in opts.get_all_related_objects():\n rel_opts_name = opts.get_rel_object_method_name(rel_opts, rel_field)\n if isinstance(rel_field.rel, OneToOne):\n try:\n sub_obj = getattr(self, 'get_%s' % rel_opts_name)()\n except ObjectDoesNotExist:\n pass\n else:\n sub_obj.delete()\n else:\n for sub_obj in getattr(self, 'get_%s_list' % rel_opts_name)():\n sub_obj.delete()\n for rel_opts, rel_field in opts.get_all_related_many_to_many_objects():\n cursor.execute(\"DELETE FROM %s WHERE %s=%%s\" % \\\n (db.db.quote_name(rel_field.get_m2m_db_table(rel_opts)),\n db.db.quote_name(self._meta.object_name.lower() + '_id')), [getattr(self, opts.pk.attname)])\n for f in opts.many_to_many:\n cursor.execute(\"DELETE FROM %s WHERE %s=%%s\" % \\\n (db.db.quote_name(f.get_m2m_db_table(opts)),\n db.db.quote_name(self._meta.object_name.lower() + '_id')),\n [getattr(self, opts.pk.attname)])\n cursor.execute(\"DELETE FROM %s WHERE %s=%%s\" % \\\n (db.db.quote_name(opts.db_table), db.db.quote_name(opts.pk.column)),\n [getattr(self, opts.pk.attname)])\n db.db.commit()\n setattr(self, opts.pk.attname, None)\n for f in opts.fields:\n if isinstance(f, FileField) and getattr(self, f.attname):\n file_name = getattr(self, 
'get_%s_filename' % f.name)()\n # If the file exists and no other object of this type references it,\n # delete it from the filesystem.\n if os.path.exists(file_name) and not opts.get_model_module().get_list(**{'%s__exact' % f.name: getattr(self, f.name)}):\n os.remove(file_name)\n # Run any post-delete hooks.\n if hasattr(self, '_post_delete'):\n self._post_delete()\n\ndef method_get_next_in_order(opts, order_field, self):\n if not hasattr(self, '_next_in_order_cache'):\n self._next_in_order_cache = opts.get_model_module().get_object(order_by=('_order',),\n where=['%s > (SELECT %s FROM %s WHERE %s=%%s)' % \\\n (db.db.quote_name('_order'), db.db.quote_name('_order'),\n db.db.quote_name(opts.db_table), db.db.quote_name(opts.pk.column)),\n '%s=%%s' % db.db.quote_name(order_field.column)], limit=1,\n params=[getattr(self, opts.pk.attname), getattr(self, order_field.attname)])\n return self._next_in_order_cache\n\ndef method_get_previous_in_order(opts, order_field, self):\n if not hasattr(self, '_previous_in_order_cache'):\n self._previous_in_order_cache = opts.get_model_module().get_object(order_by=('-_order',),\n where=['%s < (SELECT %s FROM %s WHERE %s=%%s)' % \\\n (db.db.quote_name('_order'), db.db.quote_name('_order'),\n db.db.quote_name(opts.db_table), db.db.quote_name(opts.pk.column)),\n '%s=%%s' % db.db.quote_name(order_field.column)], limit=1,\n params=[getattr(self, opts.pk.attname), getattr(self, order_field.attname)])\n return self._previous_in_order_cache\n\n# RELATIONSHIP METHODS #####################\n\n# Example: Story.get_dateline()\ndef method_get_many_to_one(field_with_rel, self):\n cache_var = field_with_rel.get_cache_name()\n if not hasattr(self, cache_var):\n val = getattr(self, field_with_rel.attname)\n mod = field_with_rel.rel.to.get_model_module()\n if val is None:\n raise getattr(mod, '%sDoesNotExist' % field_with_rel.rel.to.object_name)\n retrieved_obj = mod.get_object(**{'%s__exact' % field_with_rel.rel.field_name: val})\n setattr(self, 
cache_var, retrieved_obj)\n return getattr(self, cache_var)\n\n# Handles getting many-to-many related objects.\n# Example: Poll.get_site_list()\ndef method_get_many_to_many(field_with_rel, self):\n rel = field_with_rel.rel.to\n cache_var = '_%s_cache' % field_with_rel.name\n if not hasattr(self, cache_var):\n mod = rel.get_model_module()\n sql = \"SELECT %s FROM %s a, %s b WHERE a.%s = b.%s AND b.%s = %%s %s\" % \\\n (','.join(['a.%s' % db.db.quote_name(f.column) for f in rel.fields]),\n db.db.quote_name(rel.db_table),\n db.db.quote_name(field_with_rel.get_m2m_db_table(self._meta)),\n db.db.quote_name(rel.pk.column),\n db.db.quote_name(rel.object_name.lower() + '_id'),\n db.db.quote_name(self._meta.object_name.lower() + '_id'), rel.get_order_sql('a'))\n cursor = db.db.cursor()\n cursor.execute(sql, [getattr(self, self._meta.pk.attname)])\n setattr(self, cache_var, [getattr(mod, rel.object_name)(*row) for row in cursor.fetchall()])\n return getattr(self, cache_var)\n\n# Handles setting many-to-many relationships.\n# Example: Poll.set_sites()\ndef method_set_many_to_many(rel_field, self, id_list):\n current_ids = [obj.id for obj in method_get_many_to_many(rel_field, self)]\n ids_to_add, ids_to_delete = dict([(i, 1) for i in id_list]), []\n for current_id in current_ids:\n if current_id in id_list:\n del ids_to_add[current_id]\n else:\n ids_to_delete.append(current_id)\n ids_to_add = ids_to_add.keys()\n # Now ids_to_add is a list of IDs to add, and ids_to_delete is a list of IDs to delete.\n if not ids_to_delete and not ids_to_add:\n return False # No change\n rel = rel_field.rel.to\n m2m_table = rel_field.get_m2m_db_table(self._meta)\n cursor = db.db.cursor()\n this_id = getattr(self, self._meta.pk.attname)\n if ids_to_delete:\n sql = \"DELETE FROM %s WHERE %s = %%s AND %s IN (%s)\" % \\\n (db.db.quote_name(m2m_table),\n db.db.quote_name(self._meta.object_name.lower() + '_id'),\n db.db.quote_name(rel.object_name.lower() + '_id'), ','.join(map(str, ids_to_delete)))\n 
cursor.execute(sql, [this_id])\n if ids_to_add:\n sql = \"INSERT INTO %s (%s, %s) VALUES (%%s, %%s)\" % \\\n (db.db.quote_name(m2m_table),\n db.db.quote_name(self._meta.object_name.lower() + '_id'),\n db.db.quote_name(rel.object_name.lower() + '_id'))\n cursor.executemany(sql, [(this_id, i) for i in ids_to_add])\n db.db.commit()\n try:\n delattr(self, '_%s_cache' % rel_field.name) # clear cache, if it exists\n except AttributeError:\n pass\n return True\n\n# Handles related-object retrieval.\n# Examples: Poll.get_choice(), Poll.get_choice_list(), Poll.get_choice_count()\ndef method_get_related(method_name, rel_mod, rel_field, self, **kwargs):\n if self._meta.has_related_links and rel_mod.Klass._meta.module_name == 'relatedlinks':\n kwargs['object_id__exact'] = getattr(self, rel_field.rel.field_name)\n else:\n kwargs['%s__%s__exact' % (rel_field.name, rel_field.rel.to.pk.name)] = getattr(self, rel_field.rel.field_name)\n kwargs.update(rel_field.rel.lookup_overrides)\n return getattr(rel_mod, method_name)(**kwargs)\n\n# Handles adding related objects.\n# Example: Poll.add_choice()\ndef method_add_related(rel_obj, rel_mod, rel_field, self, *args, **kwargs):\n init_kwargs = dict(zip([f.attname for f in rel_obj.fields if f != rel_field and not isinstance(f, AutoField)], args))\n init_kwargs.update(kwargs)\n for f in rel_obj.fields:\n if isinstance(f, AutoField):\n init_kwargs[f.attname] = None\n init_kwargs[rel_field.name] = self\n obj = rel_mod.Klass(**init_kwargs)\n obj.save()\n return obj\n\n# Handles related many-to-many object retrieval.\n# Examples: Album.get_song(), Album.get_song_list(), Album.get_song_count()\ndef method_get_related_many_to_many(method_name, opts, rel_mod, rel_field, self, **kwargs):\n kwargs['%s__%s__exact' % (rel_field.name, opts.pk.name)] = getattr(self, opts.pk.attname)\n return getattr(rel_mod, method_name)(**kwargs)\n\n# Handles setting many-to-many related objects.\n# Example: Album.set_songs()\ndef 
method_set_related_many_to_many(rel_opts, rel_field, self, id_list):\n id_list = map(int, id_list) # normalize to integers\n rel = rel_field.rel.to\n m2m_table = rel_field.get_m2m_db_table(rel_opts)\n this_id = getattr(self, self._meta.pk.attname)\n cursor = db.db.cursor()\n cursor.execute(\"DELETE FROM %s WHERE %s = %%s\" % \\\n (db.db.quote_name(m2m_table),\n db.db.quote_name(rel.object_name.lower() + '_id')), [this_id])\n sql = \"INSERT INTO %s (%s, %s) VALUES (%%s, %%s)\" % \\\n (db.db.quote_name(m2m_table),\n db.db.quote_name(rel.object_name.lower() + '_id'),\n db.db.quote_name(rel_opts.object_name.lower() + '_id'))\n cursor.executemany(sql, [(this_id, i) for i in id_list])\n db.db.commit()\n\n# ORDERING METHODS #########################\n\ndef method_set_order(ordered_obj, self, id_list):\n cursor = db.db.cursor()\n # Example: \"UPDATE poll_choices SET _order = %s WHERE poll_id = %s AND id = %s\"\n sql = \"UPDATE %s SET %s = %%s WHERE %s = %%s AND %s = %%s\" % \\\n (db.db.quote_name(ordered_obj.db_table), db.db.quote_name('_order'),\n db.db.quote_name(ordered_obj.order_with_respect_to.column),\n db.db.quote_name(ordered_obj.pk.column))\n rel_val = getattr(self, ordered_obj.order_with_respect_to.rel.field_name)\n cursor.executemany(sql, [(i, rel_val, j) for i, j in enumerate(id_list)])\n db.db.commit()\n\ndef method_get_order(ordered_obj, self):\n cursor = db.db.cursor()\n # Example: \"SELECT id FROM poll_choices WHERE poll_id = %s ORDER BY _order\"\n sql = \"SELECT %s FROM %s WHERE %s = %%s ORDER BY %s\" % \\\n (db.db.quote_name(ordered_obj.pk.column),\n db.db.quote_name(ordered_obj.db_table),\n db.db.quote_name(ordered_obj.order_with_respect_to.column),\n db.db.quote_name('_order'))\n rel_val = getattr(self, ordered_obj.order_with_respect_to.rel.field_name)\n cursor.execute(sql, [rel_val])\n return [r[0] for r in cursor.fetchall()]\n\n# DATE-RELATED METHODS #####################\n\ndef method_get_next_or_previous(get_object_func, opts, field, is_next, self, 
**kwargs):\n op = is_next and '>' or '<'\n kwargs.setdefault('where', []).append('(%s %s %%s OR (%s = %%s AND %s %s %%s))' % \\\n (db.db.quote_name(field.column), op, db.db.quote_name(field.column),\n db.db.quote_name(opts.pk.column), op))\n param = str(getattr(self, field.attname))\n kwargs.setdefault('params', []).extend([param, param, getattr(self, opts.pk.attname)])\n kwargs['order_by'] = [(not is_next and '-' or '') + field.name, (not is_next and '-' or '') + opts.pk.name]\n kwargs['limit'] = 1\n return get_object_func(**kwargs)\n\n# CHOICE-RELATED METHODS ###################\n\ndef method_get_display_value(field, self):\n value = getattr(self, field.attname)\n return dict(field.choices).get(value, value)\n\n# FILE-RELATED METHODS #####################\n\ndef method_get_file_filename(field, self):\n return os.path.join(settings.MEDIA_ROOT, getattr(self, field.attname))\n\ndef method_get_file_url(field, self):\n if getattr(self, field.attname): # value is not blank\n import urlparse\n return urlparse.urljoin(settings.MEDIA_URL, getattr(self, field.attname)).replace('\\\\', '/')\n return ''\n\ndef method_get_file_size(field, self):\n return os.path.getsize(method_get_file_filename(field, self))\n\ndef method_save_file(field, self, filename, raw_contents):\n directory = field.get_directory_name()\n try: # Create the date-based directory if it doesn't exist.\n os.makedirs(os.path.join(settings.MEDIA_ROOT, directory))\n except OSError: # Directory probably already exists.\n pass\n filename = field.get_filename(filename)\n\n # If the filename already exists, keep adding an underscore to the name of\n # the file until the filename doesn't exist.\n while os.path.exists(os.path.join(settings.MEDIA_ROOT, filename)):\n try:\n dot_index = filename.rindex('.')\n except ValueError: # filename has no dot\n filename += '_'\n else:\n filename = filename[:dot_index] + '_' + filename[dot_index:]\n\n # Write the file to disk.\n setattr(self, field.attname, filename)\n fp = 
open(getattr(self, 'get_%s_filename' % field.name)(), 'wb')\n fp.write(raw_contents)\n fp.close()\n\n # Save the width and/or height, if applicable.\n if isinstance(field, ImageField) and (field.width_field or field.height_field):\n from django.utils.images import get_image_dimensions\n width, height = get_image_dimensions(getattr(self, 'get_%s_filename' % field.name)())\n if field.width_field:\n setattr(self, field.width_field, width)\n if field.height_field:\n setattr(self, field.height_field, height)\n\n # Save the object, because it has changed.\n self.save()\n\n# IMAGE FIELD METHODS ######################\n\ndef method_get_image_width(field, self):\n return _get_image_dimensions(field, self)[0]\n\ndef method_get_image_height(field, self):\n return _get_image_dimensions(field, self)[1]\n\ndef _get_image_dimensions(field, self):\n cachename = \"__%s_dimensions_cache\" % field.name\n if not hasattr(self, cachename):\n from django.utils.images import get_image_dimensions\n fname = getattr(self, \"get_%s_filename\" % field.name)()\n setattr(self, cachename, get_image_dimensions(fname))\n return getattr(self, cachename)\n\n##############################################\n# HELPER FUNCTIONS (CURRIED MODEL FUNCTIONS) #\n##############################################\n\ndef get_absolute_url(opts, func, self):\n return settings.ABSOLUTE_URL_OVERRIDES.get('%s.%s' % (opts.app_label, opts.module_name), func)(self)\n\ndef _get_where_clause(lookup_type, table_prefix, field_name, value):\n if table_prefix.endswith('.'):\n table_prefix = db.db.quote_name(table_prefix[:-1])+'.'\n field_name = db.db.quote_name(field_name)\n try:\n return '%s%s %s %%s' % (table_prefix, field_name, db.OPERATOR_MAPPING[lookup_type])\n except KeyError:\n pass\n if lookup_type == 'in':\n return '%s%s IN (%s)' % (table_prefix, field_name, ','.join(['%s' for v in value]))\n elif lookup_type == 'range':\n return '%s%s BETWEEN %%s AND %%s' % (table_prefix, field_name)\n elif lookup_type in ('year', 
'month', 'day'):\n return \"%s = %%s\" % db.get_date_extract_sql(lookup_type, table_prefix + field_name)\n elif lookup_type == 'isnull':\n return \"%s%s IS %sNULL\" % (table_prefix, field_name, (not value and 'NOT ' or ''))\n raise TypeError, \"Got invalid lookup_type: %s\" % repr(lookup_type)\n\ndef function_get_object(opts, klass, does_not_exist_exception, **kwargs):\n obj_list = function_get_list(opts, klass, **kwargs)\n if len(obj_list) < 1:\n raise does_not_exist_exception, \"%s does not exist for %s\" % (opts.object_name, kwargs)\n assert len(obj_list) == 1, \"get_object() returned more than one %s -- it returned %s! Lookup parameters were %s\" % (opts.object_name, len(obj_list), kwargs)\n return obj_list[0]\n\ndef _get_cached_row(opts, row, index_start):\n \"Helper function that recursively returns an object with cache filled\"\n index_end = index_start + len(opts.fields)\n obj = opts.get_model_module().Klass(*row[index_start:index_end])\n for f in opts.fields:\n if f.rel and not f.null:\n rel_obj, index_end = _get_cached_row(f.rel.to, row, index_end)\n setattr(obj, f.get_cache_name(), rel_obj)\n return obj, index_end\n\ndef function_get_iterator(opts, klass, **kwargs):\n # kwargs['select'] is a dictionary, and dictionaries' key order is\n # undefined, so we convert it to a list of tuples internally.\n kwargs['select'] = kwargs.get('select', {}).items()\n\n cursor = db.db.cursor()\n select, sql, params = function_get_sql_clause(opts, **kwargs)\n cursor.execute(\"SELECT \" + (kwargs.get('distinct') and \"DISTINCT \" or \"\") + \",\".join(select) + sql, params)\n fill_cache = kwargs.get('select_related')\n index_end = len(opts.fields)\n while 1:\n rows = cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE)\n if not rows:\n raise StopIteration\n for row in rows:\n if fill_cache:\n obj, index_end = _get_cached_row(opts, row, 0)\n else:\n obj = klass(*row[:index_end])\n for i, k in enumerate(kwargs['select']):\n setattr(obj, k[0], row[index_end+i])\n yield obj\n\ndef 
function_get_list(opts, klass, **kwargs):\n return list(function_get_iterator(opts, klass, **kwargs))\n\ndef function_get_count(opts, **kwargs):\n kwargs['order_by'] = []\n kwargs['offset'] = None\n kwargs['limit'] = None\n kwargs['select_related'] = False\n _, sql, params = function_get_sql_clause(opts, **kwargs)\n cursor = db.db.cursor()\n cursor.execute(\"SELECT COUNT(*)\" + sql, params)\n return cursor.fetchone()[0]\n\ndef function_get_values_iterator(opts, klass, **kwargs):\n # select_related and select aren't supported in get_values().\n kwargs['select_related'] = False\n kwargs['select'] = {}\n\n # 'fields' is a list of field names to fetch.\n try:\n fields = [opts.get_field(f).column for f in kwargs.pop('fields')]\n except KeyError: # Default to all fields.\n fields = [f.column for f in opts.fields]\n\n cursor = db.db.cursor()\n _, sql, params = function_get_sql_clause(opts, **kwargs)\n select = ['%s.%s' % (db.db.quote_name(opts.db_table), db.db.quote_name(f)) for f in fields]\n cursor.execute(\"SELECT \" + (kwargs.get('distinct') and \"DISTINCT \" or \"\") + \",\".join(select) + sql, params)\n while 1:\n rows = cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE)\n if not rows:\n raise StopIteration\n for row in rows:\n yield dict(zip(fields, row))\n\ndef function_get_values(opts, klass, **kwargs):\n return list(function_get_values_iterator(opts, klass, **kwargs))\n\ndef _fill_table_cache(opts, select, tables, where, old_prefix, cache_tables_seen):\n \"\"\"\n Helper function that recursively populates the select, tables and where (in\n place) for fill-cache queries.\n \"\"\"\n for f in opts.fields:\n if f.rel and not f.null:\n db_table = f.rel.to.db_table\n if db_table not in cache_tables_seen:\n tables.append(db.db.quote_name(db_table))\n else: # The table was already seen, so give it a table alias.\n new_prefix = '%s%s' % (db_table, len(cache_tables_seen))\n tables.append('%s %s' % (db.db.quote_name(db_table), db.db.quote_name(new_prefix)))\n db_table = 
new_prefix\n cache_tables_seen.append(db_table)\n where.append('%s.%s = %s.%s' % \\\n (db.db.quote_name(old_prefix), db.db.quote_name(f.column),\n db.db.quote_name(db_table), db.db.quote_name(f.rel.get_related_field().column)))\n select.extend(['%s.%s' % (db.db.quote_name(db_table), db.db.quote_name(f2.column)) for f2 in f.rel.to.fields])\n _fill_table_cache(f.rel.to, select, tables, where, db_table, cache_tables_seen)\n\ndef _throw_bad_kwarg_error(kwarg):\n # Helper function to remove redundancy.\n raise TypeError, \"got unexpected keyword argument '%s'\" % kwarg\n\ndef _parse_lookup(kwarg_items, opts, table_count=0):\n # Helper function that handles converting API kwargs (e.g.\n # \"name__exact\": \"tom\") to SQL.\n\n # Note that there is a distinction between where and join_where. The latter\n # is specifically a list of where clauses to use for JOINs. This\n # distinction is necessary because of support for \"_or\".\n\n # table_count is used to ensure table aliases are unique.\n tables, join_where, where, params = [], [], [], []\n for kwarg, kwarg_value in kwarg_items:\n if kwarg in ('order_by', 'limit', 'offset', 'select_related', 'distinct', 'select', 'tables', 'where', 'params'):\n continue\n if kwarg_value is None:\n continue\n if kwarg == '_or':\n for val in kwarg_value:\n tables2, join_where2, where2, params2, table_count = _parse_lookup(val, opts, table_count)\n tables.extend(tables2)\n join_where.extend(join_where2)\n where.append('(%s)' % ' OR '.join(where2))\n params.extend(params2)\n continue\n lookup_list = kwarg.split(LOOKUP_SEPARATOR)\n # pk=\"value\" is shorthand for (primary key)__exact=\"value\"\n if lookup_list[-1] == 'pk':\n if opts.pk.rel:\n lookup_list = lookup_list[:-1] + [opts.pk.name, opts.pk.rel.field_name, 'exact']\n else:\n lookup_list = lookup_list[:-1] + [opts.pk.name, 'exact']\n if len(lookup_list) == 1:\n _throw_bad_kwarg_error(kwarg)\n lookup_type = lookup_list.pop()\n current_opts = opts # We'll be overwriting this, so keep a 
reference to the original opts.\n current_table_alias = current_opts.db_table\n param_required = False\n while lookup_list or param_required:\n table_count += 1\n try:\n # \"current\" is a piece of the lookup list. For example, in\n # choices.get_list(poll__sites__id__exact=5), lookup_list is\n # [\"polls\", \"sites\", \"id\"], and the first current is \"polls\".\n try:\n current = lookup_list.pop(0)\n except IndexError:\n # If we're here, lookup_list is empty but param_required\n # is set to True, which means the kwarg was bad.\n # Example: choices.get_list(poll__exact='foo')\n _throw_bad_kwarg_error(kwarg)\n # Try many-to-many relationships first...\n for f in current_opts.many_to_many:\n if f.name == current:\n rel_table_alias = db.db.quote_name('t%s' % table_count)\n table_count += 1\n tables.append('%s %s' % \\\n (db.db.quote_name(f.get_m2m_db_table(current_opts)), rel_table_alias))\n join_where.append('%s.%s = %s.%s' % \\\n (db.db.quote_name(current_table_alias),\n db.db.quote_name(current_opts.pk.column),\n rel_table_alias,\n db.db.quote_name(current_opts.object_name.lower() + '_id')))\n # Optimization: In the case of primary-key lookups, we\n # don't have to do an extra join.\n if lookup_list and lookup_list[0] == f.rel.to.pk.name and lookup_type == 'exact':\n where.append(_get_where_clause(lookup_type, rel_table_alias+'.',\n f.rel.to.object_name.lower()+'_id', kwarg_value))\n params.extend(f.get_db_prep_lookup(lookup_type, kwarg_value))\n lookup_list.pop()\n param_required = False\n else:\n new_table_alias = 't%s' % table_count\n tables.append('%s %s' % (db.db.quote_name(f.rel.to.db_table),\n db.db.quote_name(new_table_alias)))\n join_where.append('%s.%s = %s.%s' % \\\n (db.db.quote_name(rel_table_alias),\n db.db.quote_name(f.rel.to.object_name.lower() + '_id'),\n db.db.quote_name(new_table_alias),\n db.db.quote_name(f.rel.to.pk.column)))\n current_table_alias = new_table_alias\n param_required = True\n current_opts = f.rel.to\n raise StopIteration\n for f 
in current_opts.fields:\n # Try many-to-one relationships...\n if f.rel and f.name == current:\n # Optimization: In the case of primary-key lookups, we\n # don't have to do an extra join.\n if lookup_list and lookup_list[0] == f.rel.to.pk.name and lookup_type == 'exact':\n where.append(_get_where_clause(lookup_type, current_table_alias+'.', f.column, kwarg_value))\n params.extend(f.get_db_prep_lookup(lookup_type, kwarg_value))\n lookup_list.pop()\n param_required = False\n # 'isnull' lookups in many-to-one relationships are a special case,\n # because we don't want to do a join. We just want to find out\n # whether the foreign key field is NULL.\n elif lookup_type == 'isnull' and not lookup_list:\n where.append(_get_where_clause(lookup_type, current_table_alias+'.', f.column, kwarg_value))\n params.extend(f.get_db_prep_lookup(lookup_type, kwarg_value))\n else:\n new_table_alias = 't%s' % table_count\n tables.append('%s %s' % \\\n (db.db.quote_name(f.rel.to.db_table), db.db.quote_name(new_table_alias)))\n join_where.append('%s.%s = %s.%s' % \\\n (db.db.quote_name(current_table_alias), db.db.quote_name(f.column),\n db.db.quote_name(new_table_alias), db.db.quote_name(f.rel.to.pk.column)))\n current_table_alias = new_table_alias\n param_required = True\n current_opts = f.rel.to\n raise StopIteration\n # Try direct field-name lookups...\n if f.name == current:\n where.append(_get_where_clause(lookup_type, current_table_alias+'.', f.column, kwarg_value))\n params.extend(f.get_db_prep_lookup(lookup_type, kwarg_value))\n param_required = False\n raise StopIteration\n # If we haven't hit StopIteration at this point, \"current\" must be\n # an invalid lookup, so raise an exception.\n _throw_bad_kwarg_error(kwarg)\n except StopIteration:\n continue\n return tables, join_where, where, params, table_count\n\ndef function_get_sql_clause(opts, **kwargs):\n select = [\"%s.%s\" % (db.db.quote_name(opts.db_table), db.db.quote_name(f.column)) for f in opts.fields]\n tables = 
[opts.db_table] + (kwargs.get('tables') and kwargs['tables'][:] or [])\n tables = [db.db.quote_name(t) for t in tables]\n where = kwargs.get('where') and kwargs['where'][:] or []\n params = kwargs.get('params') and kwargs['params'][:] or []\n\n # Convert the kwargs into SQL.\n tables2, join_where2, where2, params2, _ = _parse_lookup(kwargs.items(), opts)\n tables.extend(tables2)\n where.extend(join_where2 + where2)\n params.extend(params2)\n\n # Add any additional constraints from the \"where_constraints\" parameter.\n where.extend(opts.where_constraints)\n\n # Add additional tables and WHERE clauses based on select_related.\n if kwargs.get('select_related') is True:\n _fill_table_cache(opts, select, tables, where, opts.db_table, [opts.db_table])\n\n # Add any additional SELECTs passed in via kwargs.\n if kwargs.get('select'):\n select.extend(['(%s) AS %s' % (db.db.quote_name(s[1]), db.db.quote_name(s[0])) for s in kwargs['select']])\n\n # ORDER BY clause\n order_by = []\n for f in handle_legacy_orderlist(kwargs.get('order_by', opts.ordering)):\n if f == '?': # Special case.\n order_by.append(db.get_random_function_sql())\n else:\n if f.startswith('-'):\n col_name = f[1:]\n order = \"DESC\"\n else:\n col_name = f\n order = \"ASC\"\n if \".\" in col_name:\n table_prefix, col_name = col_name.split('.', 1)\n table_prefix = db.db.quote_name(table_prefix) + '.'\n else:\n # Use the database table as a column prefix if it wasn't given,\n # and if the requested column isn't a custom SELECT.\n if \".\" not in col_name and col_name not in [k[0] for k in kwargs.get('select', [])]:\n table_prefix = db.db.quote_name(opts.db_table) + '.'\n else:\n table_prefix = ''\n order_by.append('%s%s %s' % (table_prefix, db.db.quote_name(orderfield2column(col_name, opts)), order))\n order_by = \", \".join(order_by)\n\n # LIMIT and OFFSET clauses\n if kwargs.get('limit') is not None:\n limit_sql = \" %s \" % db.get_limit_offset_sql(kwargs['limit'], kwargs.get('offset'))\n else:\n assert 
kwargs.get('offset') is None, \"'offset' is not allowed without 'limit'\"\n limit_sql = \"\"\n\n return select, \" FROM \" + \",\".join(tables) + (where and \" WHERE \" + \" AND \".join(where) or \"\") + (order_by and \" ORDER BY \" + order_by or \"\") + limit_sql, params\n\ndef function_get_in_bulk(opts, klass, *args, **kwargs):\n id_list = args and args[0] or kwargs['id_list']\n assert id_list != [], \"get_in_bulk() cannot be passed an empty list.\"\n kwargs['where'] = [\"%s.%s IN (%s)\" % (db.db.quote_name(opts.db_table), db.db.quote_name(opts.pk.column), \",\".join(['%s'] * len(id_list)))]\n kwargs['params'] = id_list\n obj_list = function_get_list(opts, klass, **kwargs)\n return dict([(getattr(o, opts.pk.attname), o) for o in obj_list])\n\ndef function_get_latest(opts, klass, does_not_exist_exception, **kwargs):\n kwargs['order_by'] = ('-' + opts.get_latest_by,)\n kwargs['limit'] = 1\n return function_get_object(opts, klass, does_not_exist_exception, **kwargs)\n\ndef function_get_date_list(opts, field, *args, **kwargs):\n from django.core.db.typecasts import typecast_timestamp\n kind = args and args[0] or kwargs['kind']\n assert kind in (\"month\", \"year\", \"day\"), \"'kind' must be one of 'year', 'month' or 'day'.\"\n order = 'ASC'\n if kwargs.has_key('_order'):\n order = kwargs['_order']\n del kwargs['_order']\n assert order in ('ASC', 'DESC'), \"'order' must be either 'ASC' or 'DESC'\"\n kwargs['order_by'] = [] # Clear this because it'll mess things up otherwise.\n if field.null:\n kwargs.setdefault('where', []).append('%s.%s IS NOT NULL' % \\\n (db.db.quote_name(opts.db_table), db.db.quote_name(field.column)))\n select, sql, params = function_get_sql_clause(opts, **kwargs)\n sql = 'SELECT %s %s GROUP BY 1 ORDER BY 1' % (db.get_date_trunc_sql(kind, '%s.%s' % (db.db.quote_name(opts.db_table), db.db.quote_name(field.column))), sql)\n cursor = db.db.cursor()\n cursor.execute(sql, params)\n # We have to manually run typecast_timestamp(str()) on the results, 
because\n # MySQL doesn't automatically cast the result of date functions as datetime\n # objects -- MySQL returns the values as strings, instead.\n return [typecast_timestamp(str(row[0])) for row in cursor.fetchall()]\n\n###################################\n# HELPER FUNCTIONS (MANIPULATORS) #\n###################################\n\ndef get_manipulator(opts, klass, extra_methods, add=False, change=False):\n \"Returns the custom Manipulator (either add or change) for the given opts.\"\n assert (add == False or change == False) and add != change, \"get_manipulator() can be passed add=True or change=True, but not both\"\n man = types.ClassType('%sManipulator%s' % (opts.object_name, add and 'Add' or 'Change'), (formfields.Manipulator,), {})\n man.__module__ = MODEL_PREFIX + '.' + opts.module_name # Set this explicitly, as above.\n man.__init__ = curry(manipulator_init, opts, add, change)\n man.save = curry(manipulator_save, opts, klass, add, change)\n for field_name_list in opts.unique_together:\n setattr(man, 'isUnique%s' % '_'.join(field_name_list), curry(manipulator_validator_unique_together, field_name_list, opts))\n for f in opts.fields:\n if f.unique_for_date:\n setattr(man, 'isUnique%sFor%s' % (f.name, f.unique_for_date), curry(manipulator_validator_unique_for_date, f, opts.get_field(f.unique_for_date), opts, 'date'))\n if f.unique_for_month:\n setattr(man, 'isUnique%sFor%s' % (f.name, f.unique_for_month), curry(manipulator_validator_unique_for_date, f, opts.get_field(f.unique_for_month), opts, 'month'))\n if f.unique_for_year:\n setattr(man, 'isUnique%sFor%s' % (f.name, f.unique_for_year), curry(manipulator_validator_unique_for_date, f, opts.get_field(f.unique_for_year), opts, 'year'))\n for k, v in extra_methods.items():\n setattr(man, k, v)\n return man\n\ndef manipulator_init(opts, add, change, self, obj_key=None):\n if change:\n assert obj_key is not None, \"ChangeManipulator.__init__() must be passed obj_key parameter.\"\n self.obj_key = obj_key\n try:\n 
self.original_object = opts.get_model_module().get_object(pk=obj_key)\n except ObjectDoesNotExist:\n # If the object doesn't exist, this might be a manipulator for a\n # one-to-one related object that hasn't created its subobject yet.\n # For example, this might be a Restaurant for a Place that doesn't\n # yet have restaurant information.\n if opts.one_to_one_field:\n # Sanity check -- Make sure the \"parent\" object exists.\n # For example, make sure the Place exists for the Restaurant.\n # Let the ObjectDoesNotExist exception propogate up.\n lookup_kwargs = opts.one_to_one_field.rel.limit_choices_to\n lookup_kwargs['%s__exact' % opts.one_to_one_field.rel.field_name] = obj_key\n _ = opts.one_to_one_field.rel.to.get_model_module().get_object(**lookup_kwargs)\n params = dict([(f.attname, f.get_default()) for f in opts.fields])\n params[opts.pk.attname] = obj_key\n self.original_object = opts.get_model_module().Klass(**params)\n else:\n raise\n self.fields = []\n for f in opts.fields + opts.many_to_many:\n if f.editable and not (f.primary_key and change) and (not f.rel or not f.rel.edit_inline):\n self.fields.extend(f.get_manipulator_fields(opts, self, change))\n\n # Add fields for related objects.\n for rel_opts, rel_field in opts.get_inline_related_objects():\n if change:\n count = getattr(self.original_object, 'get_%s_count' % opts.get_rel_object_method_name(rel_opts, rel_field))()\n count += rel_field.rel.num_extra_on_change\n if rel_field.rel.min_num_in_admin:\n count = max(count, rel_field.rel.min_num_in_admin)\n if rel_field.rel.max_num_in_admin:\n count = min(count, rel_field.rel.max_num_in_admin)\n else:\n count = rel_field.rel.num_in_admin\n for f in rel_opts.fields + rel_opts.many_to_many:\n if f.editable and f != rel_field and (not f.primary_key or (f.primary_key and change)):\n for i in range(count):\n self.fields.extend(f.get_manipulator_fields(rel_opts, self, change, name_prefix='%s.%d.' 
% (rel_opts.object_name.lower(), i), rel=True))\n\n # Add field for ordering.\n if change and opts.get_ordered_objects():\n self.fields.append(formfields.CommaSeparatedIntegerField(field_name=\"order_\"))\n\ndef manipulator_save(opts, klass, add, change, self, new_data):\n from django.utils.datastructures import DotExpandedDict\n params = {}\n for f in opts.fields:\n # Fields with auto_now_add are another special case; they should keep\n # their original value in the change stage.\n if change and getattr(f, 'auto_now_add', False):\n params[f.attname] = getattr(self.original_object, f.attname)\n else:\n params[f.attname] = f.get_manipulator_new_data(new_data)\n\n if change:\n params[opts.pk.attname] = self.obj_key\n\n # First, save the basic object itself.\n new_object = klass(**params)\n new_object.save()\n\n # Now that the object's been saved, save any uploaded files.\n for f in opts.fields:\n if isinstance(f, FileField):\n f.save_file(new_data, new_object, change and self.original_object or None, change, rel=False)\n\n # Calculate which primary fields have changed.\n if change:\n self.fields_added, self.fields_changed, self.fields_deleted = [], [], []\n for f in opts.fields:\n if not f.primary_key and str(getattr(self.original_object, f.attname)) != str(getattr(new_object, f.attname)):\n self.fields_changed.append(f.verbose_name)\n\n # Save many-to-many objects. Example: Poll.set_sites()\n for f in opts.many_to_many:\n if not f.rel.edit_inline:\n was_changed = getattr(new_object, 'set_%s' % f.name)(new_data.getlist(f.name))\n if change and was_changed:\n self.fields_changed.append(f.verbose_name)\n\n # Save many-to-one objects. 
Example: Add the Choice objects for a Poll.\n for rel_opts, rel_field in opts.get_inline_related_objects():\n # Create obj_list, which is a DotExpandedDict such as this:\n # [('0', {'id': ['940'], 'choice': ['This is the first choice']}),\n # ('1', {'id': ['941'], 'choice': ['This is the second choice']}),\n # ('2', {'id': [''], 'choice': ['']})]\n obj_list = DotExpandedDict(new_data.data)[rel_opts.object_name.lower()].items()\n obj_list.sort(lambda x, y: cmp(int(x[0]), int(y[0])))\n params = {}\n\n # For each related item...\n for _, rel_new_data in obj_list:\n\n # Keep track of which core=True fields were provided.\n # If all core fields were given, the related object will be saved.\n # If none of the core fields were given, the object will be deleted.\n # If some, but not all, of the fields were given, the validator would\n # have caught that.\n all_cores_given, all_cores_blank = True, True\n\n # Get a reference to the old object. We'll use it to compare the\n # old to the new, to see which fields have changed.\n if change:\n old_rel_obj = None\n if rel_new_data[rel_opts.pk.name][0]:\n try:\n old_rel_obj = getattr(self.original_object, 'get_%s' % opts.get_rel_object_method_name(rel_opts, rel_field))(**{'%s__exact' % rel_opts.pk.name: rel_new_data[rel_opts.pk.attname][0]})\n except ObjectDoesNotExist:\n pass\n\n for f in rel_opts.fields:\n if f.core and not isinstance(f, FileField) and f.get_manipulator_new_data(rel_new_data, rel=True) in (None, ''):\n all_cores_given = False\n elif f.core and not isinstance(f, FileField) and f.get_manipulator_new_data(rel_new_data, rel=True) not in (None, ''):\n all_cores_blank = False\n # If this field isn't editable, give it the same value it had\n # previously, according to the given ID. If the ID wasn't\n # given, use a default value. 
FileFields are also a special\n # case, because they'll be dealt with later.\n if change and (isinstance(f, FileField) or not f.editable):\n if rel_new_data.get(rel_opts.pk.attname, False) and rel_new_data[rel_opts.pk.attname][0]:\n params[f.attname] = getattr(old_rel_obj, f.attname)\n else:\n params[f.attname] = f.get_default()\n elif f == rel_field:\n params[f.attname] = getattr(new_object, rel_field.rel.field_name)\n elif add and isinstance(f, AutoField):\n params[f.attname] = None\n else:\n params[f.attname] = f.get_manipulator_new_data(rel_new_data, rel=True)\n # Related links are a special case, because we have to\n # manually set the \"content_type_id\" and \"object_id\" fields.\n if opts.has_related_links and rel_opts.module_name == 'relatedlinks':\n contenttypes_mod = get_module('core', 'contenttypes')\n params['content_type_id'] = contenttypes_mod.get_object(package__label__exact=opts.app_label, python_module_name__exact=opts.module_name).id\n params['object_id'] = new_object.id\n\n # Create the related item.\n new_rel_obj = rel_opts.get_model_module().Klass(**params)\n\n # If all the core fields were provided (non-empty), save the item.\n if all_cores_given:\n new_rel_obj.save()\n\n # Save any uploaded files.\n for f in rel_opts.fields:\n if isinstance(f, FileField) and rel_new_data.get(f.attname, False):\n f.save_file(rel_new_data, new_rel_obj, change and old_rel_obj or None, old_rel_obj is not None, rel=True)\n\n # Calculate whether any fields have changed.\n if change:\n if not old_rel_obj: # This object didn't exist before.\n self.fields_added.append('%s \"%r\"' % (rel_opts.verbose_name, new_rel_obj))\n else:\n for f in rel_opts.fields:\n if not f.primary_key and f != rel_field and str(getattr(old_rel_obj, f.attname)) != str(getattr(new_rel_obj, f.attname)):\n self.fields_changed.append('%s for %s \"%r\"' % (f.verbose_name, rel_opts.verbose_name, new_rel_obj))\n\n # Save many-to-many objects.\n for f in rel_opts.many_to_many:\n if not 
f.rel.edit_inline:\n was_changed = getattr(new_rel_obj, 'set_%s' % f.name)(rel_new_data[f.attname])\n if change and was_changed:\n self.fields_changed.append('%s for %s \"%s\"' % (f.verbose_name, rel_opts.verbose_name, new_rel_obj))\n\n # If, in the change stage, all of the core fields were blank and\n # the primary key (ID) was provided, delete the item.\n if change and all_cores_blank and rel_new_data.has_key(rel_opts.pk.attname) and rel_new_data[rel_opts.pk.attname][0]:\n new_rel_obj.delete()\n self.fields_deleted.append('%s \"%r\"' % (rel_opts.verbose_name, old_rel_obj))\n\n # Save the order, if applicable.\n if change and opts.get_ordered_objects():\n order = new_data['order_'] and map(int, new_data['order_'].split(',')) or []\n for rel_opts in opts.get_ordered_objects():\n getattr(new_object, 'set_%s_order' % rel_opts.object_name.lower())(order)\n return new_object\n\ndef manipulator_validator_unique_together(field_name_list, opts, self, field_data, all_data):\n from django.utils.text import get_text_list\n field_list = [opts.get_field(field_name) for field_name in field_name_list]\n if isinstance(field_list[0].rel, ManyToOne):\n kwargs = {'%s__%s__iexact' % (field_name_list[0], field_list[0].rel.field_name): field_data}\n else:\n kwargs = {'%s__iexact' % field_name_list[0]: field_data}\n for f in field_list[1:]:\n field_val = all_data.get(f.attname, None)\n if field_val is None:\n # This will be caught by another validator, assuming the field\n # doesn't have blank=True.\n return\n if isinstance(f.rel, ManyToOne):\n kwargs['%s__pk' % f.name] = field_val\n else:\n kwargs['%s__iexact' % f.name] = field_val\n mod = opts.get_model_module()\n try:\n old_obj = mod.get_object(**kwargs)\n except ObjectDoesNotExist:\n return\n if hasattr(self, 'original_object') and getattr(self.original_object, opts.pk.attname) == getattr(old_obj, opts.pk.attname):\n pass\n else:\n raise validators.ValidationError, \"%s with this %s already exists for the given %s.\" % \\\n 
(capfirst(opts.verbose_name), field_list[0].verbose_name, get_text_list(field_name_list[1:], 'and'))\n\ndef manipulator_validator_unique_for_date(from_field, date_field, opts, lookup_type, self, field_data, all_data):\n date_str = all_data.get(date_field.get_manipulator_field_names('')[0], None)\n mod = opts.get_model_module()\n date_val = formfields.DateField.html2python(date_str)\n if date_val is None:\n return # Date was invalid. This will be caught by another validator.\n lookup_kwargs = {'%s__year' % date_field.name: date_val.year}\n if isinstance(from_field.rel, ManyToOne):\n lookup_kwargs['%s__pk' % from_field.name] = field_data\n else:\n lookup_kwargs['%s__iexact' % from_field.name] = field_data\n if lookup_type in ('month', 'date'):\n lookup_kwargs['%s__month' % date_field.name] = date_val.month\n if lookup_type == 'date':\n lookup_kwargs['%s__day' % date_field.name] = date_val.day\n try:\n old_obj = mod.get_object(**lookup_kwargs)\n except ObjectDoesNotExist:\n return\n else:\n if hasattr(self, 'original_object') and getattr(self.original_object, opts.pk.attname) == getattr(old_obj, opts.pk.attname):\n pass\n else:\n format_string = (lookup_type == 'date') and '%B %d, %Y' or '%B %Y'\n raise validators.ValidationError, \"Please enter a different %s. The one you entered is already being used for %s.\" % \\\n (from_field.verbose_name, date_val.strftime(format_string))\n", "django/core/meta/fields.py": "from django.conf import settings\nfrom django.core import formfields, validators\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.utils.functional import curry, lazy\nfrom django.utils.text import capfirst\nfrom django.utils.translation import gettext_lazy\nimport datetime, os\n\n# Random entropy string used by \"default\" param.\nNOT_PROVIDED = 'oijpwojefiojpanv'\n\n# Values for filter_interface.\nHORIZONTAL, VERTICAL = 1, 2\n\n# The values to use for \"blank\" in SelectFields. 
Will be appended to the start of most \"choices\" lists.\nBLANK_CHOICE_DASH = [(\"\", \"---------\")]\nBLANK_CHOICE_NONE = [(\"\", \"None\")]\n\n# Values for Relation.edit_inline.\nTABULAR, STACKED = 1, 2\n\nRECURSIVE_RELATIONSHIP_CONSTANT = 'self'\n\n# prepares a value for use in a LIKE query\nprep_for_like_query = lambda x: str(x).replace(\"%\", \"\\%\").replace(\"_\", \"\\_\")\n\n# returns the
class for a given radio_admin value\nget_ul_class = lambda x: 'radiolist%s' % ((x == HORIZONTAL) and ' inline' or '')\n\ndef string_concat(*strings):\n \"\"\"\"\n lazy variant of string concatenation, needed for translations that are\n constructed from multiple parts. Handles lazy strings and non-strings by\n first turning all arguments to strings, before joining them.\n \"\"\"\n return ''.join([str(el) for el in strings])\n\nstring_concat = lazy(string_concat, str)\n\ndef manipulator_valid_rel_key(f, self, field_data, all_data):\n \"Validates that the value is a valid foreign key\"\n mod = f.rel.to.get_model_module()\n try:\n mod.get_object(pk=field_data)\n except ObjectDoesNotExist:\n raise validators.ValidationError, \"Please enter a valid %s.\" % f.verbose_name\n\ndef manipulator_validator_unique(f, opts, self, field_data, all_data):\n \"Validates that the value is unique for this field.\"\n if f.rel and isinstance(f.rel, ManyToOne):\n lookup_type = 'pk'\n else:\n lookup_type = 'exact'\n try:\n old_obj = opts.get_model_module().get_object(**{'%s__%s' % (f.name, lookup_type): field_data})\n except ObjectDoesNotExist:\n return\n if hasattr(self, 'original_object') and getattr(self.original_object, opts.pk.attname) == getattr(old_obj, opts.pk.attname):\n return\n raise validators.ValidationError, \"%s with this %s already exists.\" % (capfirst(opts.verbose_name), f.verbose_name)\n\n\n# A guide to Field parameters:\n#\n# * name: The name of the field specifed in the model.\n# * attname: The attribute to use on the model object. This is the same as\n# \"name\", except in the case of ForeignKeys, where \"_id\" is\n# appended.\n# * db_column: The db_column specified in the model (or None).\n# * column: The database column for this field. This is the same as\n# \"attname\", except if db_column is specified.\n#\n# Code that introspects values, or does other dynamic things, should use\n# attname. 
For example, this gets the primary key value of object \"obj\":\n#\n# getattr(obj, opts.pk.attname)\n\nclass Field(object):\n\n # Designates whether empty strings fundamentally are allowed at the\n # database level.\n empty_strings_allowed = True\n\n # Tracks each time a Field instance is created. Used to retain order.\n creation_counter = 0\n\n def __init__(self, verbose_name=None, name=None, primary_key=False,\n maxlength=None, unique=False, blank=False, null=False, db_index=None,\n core=False, rel=None, default=NOT_PROVIDED, editable=True,\n prepopulate_from=None, unique_for_date=None, unique_for_month=None,\n unique_for_year=None, validator_list=None, choices=None, radio_admin=None,\n help_text='', db_column=None):\n self.name = name\n self.verbose_name = verbose_name or (name and name.replace('_', ' '))\n self.primary_key = primary_key\n self.maxlength, self.unique = maxlength, unique\n self.blank, self.null = blank, null\n self.core, self.rel, self.default = core, rel, default\n self.editable = editable\n self.validator_list = validator_list or []\n self.prepopulate_from = prepopulate_from\n self.unique_for_date, self.unique_for_month = unique_for_date, unique_for_month\n self.unique_for_year = unique_for_year\n self.choices = choices or []\n self.radio_admin = radio_admin\n self.help_text = help_text\n self.db_column = db_column\n if rel and isinstance(rel, ManyToMany):\n if rel.raw_id_admin:\n self.help_text = string_concat(self.help_text,\n gettext_lazy(' Separate multiple IDs with commas.'))\n else:\n self.help_text = string_concat(self.help_text,\n gettext_lazy(' Hold down \"Control\", or \"Command\" on a Mac, to select more than one.'))\n\n # Set db_index to True if the field has a relationship and doesn't explicitly set db_index.\n if db_index is None:\n if isinstance(rel, OneToOne) or isinstance(rel, ManyToOne):\n self.db_index = True\n else:\n self.db_index = False\n else:\n self.db_index = db_index\n\n # Increase the creation counter, and save our 
local copy.\n self.creation_counter = Field.creation_counter\n Field.creation_counter += 1\n\n self.attname, self.column = self.get_attname_column()\n\n def set_name(self, name):\n self.name = name\n self.verbose_name = self.verbose_name or name.replace('_', ' ')\n self.attname, self.column = self.get_attname_column()\n\n def get_attname_column(self):\n if isinstance(self.rel, ManyToOne):\n attname = '%s_id' % self.name\n else:\n attname = self.name\n column = self.db_column or attname\n return attname, column\n\n def get_cache_name(self):\n return '_%s_cache' % self.name\n\n def get_internal_type(self):\n return self.__class__.__name__\n\n def pre_save(self, value, add):\n \"Returns field's value just before saving.\"\n return value\n\n def get_db_prep_save(self, value):\n \"Returns field's value prepared for saving into a database.\"\n return value\n\n def get_db_prep_lookup(self, lookup_type, value):\n \"Returns field's value prepared for database lookup.\"\n if lookup_type in ('exact', 'gt', 'gte', 'lt', 'lte', 'ne', 'year', 'month', 'day'):\n return [value]\n elif lookup_type in ('range', 'in'):\n return value\n elif lookup_type in ('contains', 'icontains'):\n return [\"%%%s%%\" % prep_for_like_query(value)]\n elif lookup_type == 'iexact':\n return [prep_for_like_query(value)]\n elif lookup_type in ('startswith', 'istartswith'):\n return [\"%s%%\" % prep_for_like_query(value)]\n elif lookup_type in ('endswith', 'iendswith'):\n return [\"%%%s\" % prep_for_like_query(value)]\n elif lookup_type == 'isnull':\n return []\n raise TypeError, \"Field has invalid lookup: %s\" % lookup_type\n\n def has_default(self):\n \"Returns a boolean of whether this field has a default value.\"\n return self.default != NOT_PROVIDED\n\n def get_default(self):\n \"Returns the default value for this field.\"\n if self.default != NOT_PROVIDED:\n if hasattr(self.default, '__get_value__'):\n return self.default.__get_value__()\n return self.default\n if self.null:\n return None\n return 
\"\"\n\n def get_manipulator_field_names(self, name_prefix):\n \"\"\"\n Returns a list of field names that this object adds to the manipulator.\n \"\"\"\n return [name_prefix + self.name]\n\n def get_manipulator_fields(self, opts, manipulator, change, name_prefix='', rel=False):\n \"\"\"\n Returns a list of formfields.FormField instances for this field. It\n calculates the choices at runtime, not at compile time.\n\n name_prefix is a prefix to prepend to the \"field_name\" argument.\n rel is a boolean specifying whether this field is in a related context.\n \"\"\"\n params = {'validator_list': self.validator_list[:]}\n if self.maxlength and not self.choices: # Don't give SelectFields a maxlength parameter.\n params['maxlength'] = self.maxlength\n if isinstance(self.rel, ManyToOne):\n if self.rel.raw_id_admin:\n field_objs = self.get_manipulator_field_objs()\n params['validator_list'].append(curry(manipulator_valid_rel_key, self, manipulator))\n else:\n if self.radio_admin:\n field_objs = [formfields.RadioSelectField]\n params['choices'] = self.get_choices(include_blank=self.blank, blank_choice=BLANK_CHOICE_NONE)\n params['ul_class'] = get_ul_class(self.radio_admin)\n else:\n if self.null:\n field_objs = [formfields.NullSelectField]\n else:\n field_objs = [formfields.SelectField]\n params['choices'] = self.get_choices()\n elif self.choices:\n if self.radio_admin:\n field_objs = [formfields.RadioSelectField]\n params['choices'] = self.get_choices(include_blank=self.blank, blank_choice=BLANK_CHOICE_NONE)\n params['ul_class'] = get_ul_class(self.radio_admin)\n else:\n field_objs = [formfields.SelectField]\n params['choices'] = self.get_choices()\n else:\n field_objs = self.get_manipulator_field_objs()\n\n # Add the \"unique\" validator(s).\n for field_name_list in opts.unique_together:\n if field_name_list[0] == self.name:\n params['validator_list'].append(getattr(manipulator, 'isUnique%s' % '_'.join(field_name_list)))\n\n # Add the \"unique for...\" validator(s).\n if 
self.unique_for_date:\n params['validator_list'].append(getattr(manipulator, 'isUnique%sFor%s' % (self.name, self.unique_for_date)))\n if self.unique_for_month:\n params['validator_list'].append(getattr(manipulator, 'isUnique%sFor%s' % (self.name, self.unique_for_month)))\n if self.unique_for_year:\n params['validator_list'].append(getattr(manipulator, 'isUnique%sFor%s' % (self.name, self.unique_for_year)))\n if self.unique or (self.primary_key and not rel):\n params['validator_list'].append(curry(manipulator_validator_unique, self, opts, manipulator))\n\n # Only add is_required=True if the field cannot be blank. Primary keys\n # are a special case, and fields in a related context should set this\n # as False, because they'll be caught by a separate validator --\n # RequiredIfOtherFieldGiven.\n params['is_required'] = not self.blank and not self.primary_key and not rel\n\n # If this field is in a related context, check whether any other fields\n # in the related object have core=True. If so, add a validator --\n # RequiredIfOtherFieldsGiven -- to this FormField.\n if rel and not self.blank and not isinstance(self, AutoField) and not isinstance(self, FileField):\n # First, get the core fields, if any.\n core_field_names = []\n for f in opts.fields:\n if f.core and f != self:\n core_field_names.extend(f.get_manipulator_field_names(name_prefix))\n # Now, if there are any, add the validator to this FormField.\n if core_field_names:\n params['validator_list'].append(validators.RequiredIfOtherFieldsGiven(core_field_names, \"This field is required.\"))\n\n # BooleanFields (CheckboxFields) are a special case. 
They don't take\n # is_required or validator_list.\n if isinstance(self, BooleanField):\n del params['validator_list'], params['is_required']\n\n # Finally, add the field_names.\n field_names = self.get_manipulator_field_names(name_prefix)\n return [man(field_name=field_names[i], **params) for i, man in enumerate(field_objs)]\n\n def get_manipulator_new_data(self, new_data, rel=False):\n \"\"\"\n Given the full new_data dictionary (from the manipulator), returns this\n field's data.\n \"\"\"\n if rel:\n return new_data.get(self.name, [self.get_default()])[0]\n else:\n val = new_data.get(self.name, self.get_default())\n if not self.empty_strings_allowed and val == '' and self.null:\n val = None\n return val\n\n def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH):\n \"Returns a list of tuples used as SelectField choices for this field.\"\n first_choice = include_blank and blank_choice or []\n if self.choices:\n return first_choice + list(self.choices)\n rel_obj = self.rel.to\n return first_choice + [(getattr(x, rel_obj.pk.attname), repr(x)) for x in rel_obj.get_model_module().get_list(**self.rel.limit_choices_to)]\n\nclass AutoField(Field):\n empty_strings_allowed = False\n def __init__(self, *args, **kwargs):\n assert kwargs.get('primary_key', False) is True, \"%ss must have primary_key=True.\" % self.__class__.__name__\n Field.__init__(self, *args, **kwargs)\n\n def get_manipulator_fields(self, opts, manipulator, change, name_prefix='', rel=False):\n if not rel:\n return [] # Don't add a FormField unless it's in a related context.\n return Field.get_manipulator_fields(self, opts, manipulator, change, name_prefix, rel)\n\n def get_manipulator_field_objs(self):\n return [formfields.HiddenField]\n\n def get_manipulator_new_data(self, new_data, rel=False):\n if not rel:\n return None\n return Field.get_manipulator_new_data(self, new_data, rel)\n\nclass BooleanField(Field):\n def __init__(self, *args, **kwargs):\n kwargs['blank'] = True\n 
Field.__init__(self, *args, **kwargs)\n\n def get_manipulator_field_objs(self):\n return [formfields.CheckboxField]\n\nclass CharField(Field):\n def get_manipulator_field_objs(self):\n return [formfields.TextField]\n\nclass CommaSeparatedIntegerField(CharField):\n def get_manipulator_field_objs(self):\n return [formfields.CommaSeparatedIntegerField]\n\nclass DateField(Field):\n empty_strings_allowed = False\n def __init__(self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs):\n self.auto_now, self.auto_now_add = auto_now, auto_now_add\n if auto_now or auto_now_add:\n kwargs['editable'] = False\n Field.__init__(self, verbose_name, name, **kwargs)\n\n def get_db_prep_lookup(self, lookup_type, value):\n if lookup_type == 'range':\n value = [str(v) for v in value]\n else:\n value = str(value)\n return Field.get_db_prep_lookup(self, lookup_type, value)\n\n def pre_save(self, value, add):\n if self.auto_now or (self.auto_now_add and add):\n return datetime.datetime.now()\n return value\n\n def get_db_prep_save(self, value):\n # Casts dates into string format for entry into database.\n if value is not None:\n value = value.strftime('%Y-%m-%d')\n return Field.get_db_prep_save(self, value)\n\n def get_manipulator_field_objs(self):\n return [formfields.DateField]\n\nclass DateTimeField(DateField):\n def get_db_prep_save(self, value):\n # Casts dates into string format for entry into database.\n if value is not None:\n # MySQL will throw a warning if microseconds are given, because it\n # doesn't support microseconds.\n if settings.DATABASE_ENGINE == 'mysql':\n value = value.replace(microsecond=0)\n value = str(value)\n return Field.get_db_prep_save(self, value)\n\n def get_manipulator_field_objs(self):\n return [formfields.DateField, formfields.TimeField]\n\n def get_manipulator_field_names(self, name_prefix):\n return [name_prefix + self.name + '_date', name_prefix + self.name + '_time']\n\n def get_manipulator_new_data(self, new_data, 
rel=False):\n date_field, time_field = self.get_manipulator_field_names('')\n if rel:\n d = new_data.get(date_field, [None])[0]\n t = new_data.get(time_field, [None])[0]\n else:\n d = new_data.get(date_field, None)\n t = new_data.get(time_field, None)\n if d is not None and t is not None:\n return datetime.datetime.combine(d, t)\n return self.get_default()\n\nclass EmailField(Field):\n def get_manipulator_field_objs(self):\n return [formfields.EmailField]\n\nclass FileField(Field):\n def __init__(self, verbose_name=None, name=None, upload_to='', **kwargs):\n self.upload_to = upload_to\n Field.__init__(self, verbose_name, name, **kwargs)\n\n def get_manipulator_fields(self, opts, manipulator, change, name_prefix='', rel=False):\n field_list = Field.get_manipulator_fields(self, opts, manipulator, change, name_prefix, rel)\n\n if not self.blank:\n if rel:\n # This validator makes sure FileFields work in a related context.\n class RequiredFileField:\n def __init__(self, other_field_names, other_file_field_name):\n self.other_field_names = other_field_names\n self.other_file_field_name = other_file_field_name\n self.always_test = True\n def __call__(self, field_data, all_data):\n if not all_data.get(self.other_file_field_name, False):\n c = validators.RequiredIfOtherFieldsGiven(self.other_field_names, \"This field is required.\")\n c(field_data, all_data)\n # First, get the core fields, if any.\n core_field_names = []\n for f in opts.fields:\n if f.core and f != self:\n core_field_names.extend(f.get_manipulator_field_names(name_prefix))\n # Now, if there are any, add the validator to this FormField.\n if core_field_names:\n field_list[0].validator_list.append(RequiredFileField(core_field_names, field_list[1].field_name))\n else:\n v = validators.RequiredIfOtherFieldNotGiven(field_list[1].field_name, \"This field is required.\")\n v.always_test = True\n field_list[0].validator_list.append(v)\n field_list[0].is_required = field_list[1].is_required = False\n\n # If the raw 
path is passed in, validate it's under the MEDIA_ROOT.\n def isWithinMediaRoot(field_data, all_data):\n f = os.path.abspath(os.path.join(settings.MEDIA_ROOT, field_data))\n if not f.startswith(os.path.normpath(settings.MEDIA_ROOT)):\n raise validators.ValidationError, \"Enter a valid filename.\"\n field_list[1].validator_list.append(isWithinMediaRoot)\n return field_list\n\n def get_manipulator_field_objs(self):\n return [formfields.FileUploadField, formfields.HiddenField]\n\n def get_manipulator_field_names(self, name_prefix):\n return [name_prefix + self.name + '_file', name_prefix + self.name]\n\n def save_file(self, new_data, new_object, original_object, change, rel):\n upload_field_name = self.get_manipulator_field_names('')[0]\n if new_data.get(upload_field_name, False):\n if rel:\n getattr(new_object, 'save_%s_file' % self.name)(new_data[upload_field_name][0][\"filename\"], new_data[upload_field_name][0][\"content\"])\n else:\n getattr(new_object, 'save_%s_file' % self.name)(new_data[upload_field_name][\"filename\"], new_data[upload_field_name][\"content\"])\n\n def get_directory_name(self):\n return os.path.normpath(datetime.datetime.now().strftime(self.upload_to))\n\n def get_filename(self, filename):\n from django.utils.text import get_valid_filename\n f = os.path.join(self.get_directory_name(), get_valid_filename(os.path.basename(filename)))\n return os.path.normpath(f)\n\nclass FilePathField(Field):\n def __init__(self, verbose_name=None, name=None, path='', match=None, recursive=False, **kwargs):\n self.path, self.match, self.recursive = path, match, recursive\n Field.__init__(self, verbose_name, name, **kwargs)\n\n def get_manipulator_field_objs(self):\n return [curry(formfields.FilePathField, path=self.path, match=self.match, recursive=self.recursive)]\n\nclass FloatField(Field):\n empty_strings_allowed = False\n def __init__(self, verbose_name=None, name=None, max_digits=None, decimal_places=None, **kwargs):\n self.max_digits, self.decimal_places = 
max_digits, decimal_places\n Field.__init__(self, verbose_name, name, **kwargs)\n\n def get_manipulator_field_objs(self):\n return [curry(formfields.FloatField, max_digits=self.max_digits, decimal_places=self.decimal_places)]\n\nclass ImageField(FileField):\n def __init__(self, verbose_name=None, name=None, width_field=None, height_field=None, **kwargs):\n self.width_field, self.height_field = width_field, height_field\n FileField.__init__(self, verbose_name, name, **kwargs)\n\n def get_manipulator_field_objs(self):\n return [formfields.ImageUploadField, formfields.HiddenField]\n\n def save_file(self, new_data, new_object, original_object, change, rel):\n FileField.save_file(self, new_data, new_object, original_object, change, rel)\n # If the image has height and/or width field(s) and they haven't\n # changed, set the width and/or height field(s) back to their original\n # values.\n if change and (self.width_field or self.height_field):\n if self.width_field:\n setattr(new_object, self.width_field, getattr(original_object, self.width_field))\n if self.height_field:\n setattr(new_object, self.height_field, getattr(original_object, self.height_field))\n new_object.save()\n\nclass IntegerField(Field):\n empty_strings_allowed = False\n def get_manipulator_field_objs(self):\n return [formfields.IntegerField]\n\nclass IPAddressField(Field):\n def __init__(self, *args, **kwargs):\n kwargs['maxlength'] = 15\n Field.__init__(self, *args, **kwargs)\n\n def get_manipulator_field_objs(self):\n return [formfields.IPAddressField]\n\nclass NullBooleanField(Field):\n def __init__(self, *args, **kwargs):\n kwargs['null'] = True\n Field.__init__(self, *args, **kwargs)\n\n def get_manipulator_field_objs(self):\n return [formfields.NullBooleanField]\n\nclass PhoneNumberField(IntegerField):\n def get_manipulator_field_objs(self):\n return [formfields.PhoneNumberField]\n\nclass PositiveIntegerField(IntegerField):\n def get_manipulator_field_objs(self):\n return 
[formfields.PositiveIntegerField]\n\nclass PositiveSmallIntegerField(IntegerField):\n def get_manipulator_field_objs(self):\n return [formfields.PositiveSmallIntegerField]\n\nclass SlugField(Field):\n def __init__(self, *args, **kwargs):\n kwargs['maxlength'] = 50\n kwargs.setdefault('validator_list', []).append(validators.isSlug)\n # Set db_index=True unless it's been set manually.\n if not kwargs.has_key('db_index'):\n kwargs['db_index'] = True\n Field.__init__(self, *args, **kwargs)\n\n def get_manipulator_field_objs(self):\n return [formfields.TextField]\n\nclass SmallIntegerField(IntegerField):\n def get_manipulator_field_objs(self):\n return [formfields.SmallIntegerField]\n\nclass TextField(Field):\n def get_manipulator_field_objs(self):\n return [formfields.LargeTextField]\n\nclass TimeField(Field):\n empty_strings_allowed = False\n def __init__(self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs):\n self.auto_now, self.auto_now_add = auto_now, auto_now_add\n if auto_now or auto_now_add:\n kwargs['editable'] = False\n Field.__init__(self, verbose_name, name, **kwargs)\n\n def get_db_prep_lookup(self, lookup_type, value):\n if lookup_type == 'range':\n value = [str(v) for v in value]\n else:\n value = str(value)\n return Field.get_db_prep_lookup(self, lookup_type, value)\n\n def pre_save(self, value, add):\n if self.auto_now or (self.auto_now_add and add):\n return datetime.datetime.now().time()\n return value\n\n def get_db_prep_save(self, value):\n # Casts dates into string format for entry into database.\n if value is not None:\n # MySQL will throw a warning if microseconds are given, because it\n # doesn't support microseconds.\n if settings.DATABASE_ENGINE == 'mysql':\n value = value.replace(microsecond=0)\n value = str(value)\n return Field.get_db_prep_save(self, value)\n\n def get_manipulator_field_objs(self):\n return [formfields.TimeField]\n\nclass URLField(Field):\n def __init__(self, verbose_name=None, name=None, 
verify_exists=True, **kwargs):\n if verify_exists:\n kwargs.setdefault('validator_list', []).append(validators.isExistingURL)\n Field.__init__(self, verbose_name, name, **kwargs)\n\n def get_manipulator_field_objs(self):\n return [formfields.URLField]\n\nclass USStateField(Field):\n def get_manipulator_field_objs(self):\n return [formfields.USStateField]\n\nclass XMLField(TextField):\n def __init__(self, verbose_name=None, name=None, schema_path=None, **kwargs):\n self.schema_path = schema_path\n Field.__init__(self, verbose_name, name, **kwargs)\n\n def get_internal_type(self):\n return \"TextField\"\n\n def get_manipulator_field_objs(self):\n return [curry(formfields.XMLLargeTextField, schema_path=self.schema_path)]\n\nclass ForeignKey(Field):\n empty_strings_allowed = False\n def __init__(self, to, to_field=None, **kwargs):\n try:\n to_name = to._meta.object_name.lower()\n except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT\n assert to == 'self', \"ForeignKey(%r) is invalid. First parameter to ForeignKey must be either a model or the string %r\" % (to, RECURSIVE_RELATIONSHIP_CONSTANT)\n kwargs['verbose_name'] = kwargs.get('verbose_name', '')\n else:\n to_field = to_field or to._meta.pk.name\n kwargs['verbose_name'] = kwargs.get('verbose_name', to._meta.verbose_name)\n\n if kwargs.has_key('edit_inline_type'):\n import warnings\n warnings.warn(\"edit_inline_type is deprecated. 
Use edit_inline instead.\")\n kwargs['edit_inline'] = kwargs.pop('edit_inline_type')\n\n kwargs['rel'] = ManyToOne(to, to_field,\n num_in_admin=kwargs.pop('num_in_admin', 3),\n min_num_in_admin=kwargs.pop('min_num_in_admin', None),\n max_num_in_admin=kwargs.pop('max_num_in_admin', None),\n num_extra_on_change=kwargs.pop('num_extra_on_change', 1),\n edit_inline=kwargs.pop('edit_inline', False),\n related_name=kwargs.pop('related_name', None),\n limit_choices_to=kwargs.pop('limit_choices_to', None),\n lookup_overrides=kwargs.pop('lookup_overrides', None),\n raw_id_admin=kwargs.pop('raw_id_admin', False))\n Field.__init__(self, **kwargs)\n\n def get_manipulator_field_objs(self):\n rel_field = self.rel.get_related_field()\n if self.rel.raw_id_admin and not isinstance(rel_field, AutoField):\n return rel_field.get_manipulator_field_objs()\n else:\n return [formfields.IntegerField]\n\nclass ManyToManyField(Field):\n def __init__(self, to, **kwargs):\n kwargs['verbose_name'] = kwargs.get('verbose_name', to._meta.verbose_name_plural)\n kwargs['rel'] = ManyToMany(to, kwargs.pop('singular', None),\n num_in_admin=kwargs.pop('num_in_admin', 0),\n related_name=kwargs.pop('related_name', None),\n filter_interface=kwargs.pop('filter_interface', None),\n limit_choices_to=kwargs.pop('limit_choices_to', None),\n raw_id_admin=kwargs.pop('raw_id_admin', False))\n if kwargs[\"rel\"].raw_id_admin:\n kwargs.setdefault(\"validator_list\", []).append(self.isValidIDList)\n Field.__init__(self, **kwargs)\n\n def get_manipulator_field_objs(self):\n if self.rel.raw_id_admin:\n return [formfields.CommaSeparatedIntegerField]\n else:\n choices = self.get_choices(include_blank=False)\n return [curry(formfields.SelectMultipleField, size=min(max(len(choices), 5), 15), choices=choices)]\n\n def get_m2m_db_table(self, original_opts):\n \"Returns the name of the many-to-many 'join' table.\"\n return '%s_%s' % (original_opts.db_table, self.name)\n\n def isValidIDList(self, field_data, all_data):\n 
\"Validates that the value is a valid list of foreign keys\"\n mod = self.rel.to.get_model_module()\n try:\n pks = map(int, field_data.split(','))\n except ValueError:\n # the CommaSeparatedIntegerField validator will catch this error\n return\n objects = mod.get_in_bulk(pks)\n if len(objects) != len(pks):\n badkeys = [k for k in pks if k not in objects]\n raise validators.ValidationError, \"Please enter valid %s IDs. The value%s %r %s invalid.\" % \\\n (self.verbose_name, len(badkeys) > 1 and 's' or '',\n len(badkeys) == 1 and badkeys[0] or tuple(badkeys),\n len(badkeys) == 1 and \"is\" or \"are\")\n\nclass OneToOneField(IntegerField):\n def __init__(self, to, to_field=None, **kwargs):\n kwargs['verbose_name'] = kwargs.get('verbose_name', 'ID')\n to_field = to_field or to._meta.pk.name\n\n if kwargs.has_key('edit_inline_type'):\n import warnings\n warnings.warn(\"edit_inline_type is deprecated. Use edit_inline instead.\")\n kwargs['edit_inline'] = kwargs.pop('edit_inline_type')\n\n kwargs['rel'] = OneToOne(to, to_field,\n num_in_admin=kwargs.pop('num_in_admin', 0),\n edit_inline=kwargs.pop('edit_inline', False),\n related_name=kwargs.pop('related_name', None),\n limit_choices_to=kwargs.pop('limit_choices_to', None),\n lookup_overrides=kwargs.pop('lookup_overrides', None),\n raw_id_admin=kwargs.pop('raw_id_admin', False))\n kwargs['primary_key'] = True\n IntegerField.__init__(self, **kwargs)\n\nclass ManyToOne:\n def __init__(self, to, field_name, num_in_admin=3, min_num_in_admin=None,\n max_num_in_admin=None, num_extra_on_change=1, edit_inline=False,\n related_name=None, limit_choices_to=None, lookup_overrides=None, raw_id_admin=False):\n try:\n self.to = to._meta\n except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT\n assert to == RECURSIVE_RELATIONSHIP_CONSTANT, \"'to' must be either a model or the string '%s'\" % RECURSIVE_RELATIONSHIP_CONSTANT\n self.to = to\n self.field_name = field_name\n self.num_in_admin, 
self.edit_inline = num_in_admin, edit_inline\n self.min_num_in_admin, self.max_num_in_admin = min_num_in_admin, max_num_in_admin\n self.num_extra_on_change, self.related_name = num_extra_on_change, related_name\n self.limit_choices_to = limit_choices_to or {}\n self.lookup_overrides = lookup_overrides or {}\n self.raw_id_admin = raw_id_admin\n\n def get_related_field(self):\n \"Returns the Field in the 'to' object to which this relationship is tied.\"\n return self.to.get_field(self.field_name)\n\nclass ManyToMany:\n def __init__(self, to, singular=None, num_in_admin=0, related_name=None,\n filter_interface=None, limit_choices_to=None, raw_id_admin=False):\n self.to = to._meta\n self.singular = singular or to._meta.object_name.lower()\n self.num_in_admin = num_in_admin\n self.related_name = related_name\n self.filter_interface = filter_interface\n self.limit_choices_to = limit_choices_to or {}\n self.edit_inline = False\n self.raw_id_admin = raw_id_admin\n assert not (self.raw_id_admin and self.filter_interface), \"ManyToMany relationships may not use both raw_id_admin and filter_interface\"\n\nclass OneToOne(ManyToOne):\n def __init__(self, to, field_name, num_in_admin=0, edit_inline=False,\n related_name=None, limit_choices_to=None, lookup_overrides=None,\n raw_id_admin=False):\n self.to, self.field_name = to._meta, field_name\n self.num_in_admin, self.edit_inline = num_in_admin, edit_inline\n self.related_name = related_name\n self.limit_choices_to = limit_choices_to or {}\n self.lookup_overrides = lookup_overrides or {}\n self.raw_id_admin = raw_id_admin\n\nclass Admin:\n def __init__(self, fields=None, js=None, list_display=None, list_filter=None, date_hierarchy=None,\n save_as=False, ordering=None, search_fields=None, save_on_top=False, list_select_related=False):\n self.fields = fields\n self.js = js or []\n self.list_display = list_display or ['__repr__']\n self.list_filter = list_filter or []\n self.date_hierarchy = date_hierarchy\n self.save_as, 
self.ordering = save_as, ordering\n self.search_fields = search_fields or []\n self.save_on_top = save_on_top\n self.list_select_related = list_select_related\n\n def get_field_objs(self, opts):\n \"\"\"\n Returns self.fields, except with fields as Field objects instead of\n field names. If self.fields is None, defaults to putting every\n non-AutoField field with editable=True in a single fieldset.\n \"\"\"\n if self.fields is None:\n field_struct = ((None, {'fields': [f.name for f in opts.fields + opts.many_to_many if f.editable and not isinstance(f, AutoField)]}),)\n else:\n field_struct = self.fields\n new_fieldset_list = []\n for fieldset in field_struct:\n new_fieldset = [fieldset[0], {}]\n new_fieldset[1].update(fieldset[1])\n admin_fields = []\n for field_name_or_list in fieldset[1]['fields']:\n if isinstance(field_name_or_list, basestring):\n admin_fields.append([opts.get_field(field_name_or_list)])\n else:\n admin_fields.append([opts.get_field(field_name) for field_name in field_name_or_list])\n new_fieldset[1]['fields'] = admin_fields\n new_fieldset_list.append(new_fieldset)\n return new_fieldset_list\n"}}
-{"repo": "qos-ch/cal10n", "pr_number": 6, "title": "Verify Ant Task...Take Two", "state": "closed", "merged_at": "2013-04-25T22:52:34Z", "additions": 527, "deletions": 0, "files_changed": ["cal10n-ant-task/src/main/java/ch/qos/cal10n/ant/EnumTypesElement.java", "cal10n-ant-task/src/main/java/ch/qos/cal10n/ant/StringElement.java", "cal10n-ant-task/src/main/java/ch/qos/cal10n/ant/VerifyTask.java", "cal10n-ant-task/src/test/java/ch/qos/cal10n/ant/VerifyTaskTest.java", "cal10n-ant-task/src/test/java/ch/qos/cal10n/ant/testdata/Colors.java", "cal10n-ant-task/src/test/java/ch/qos/cal10n/ant/testdata/DaysOfTheWeek.java"], "files_before": {}, "files_after": {"cal10n-ant-task/src/main/java/ch/qos/cal10n/ant/EnumTypesElement.java": "/*\n * Copyright (c) 2009 QOS.ch All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\npackage ch.qos.cal10n.ant;\n\nimport org.apache.tools.ant.Task;\n\nimport java.util.LinkedList;\nimport java.util.List;\n\npublic class EnumTypesElement extends Task {\n private List enumTypes;\n\n public EnumTypesElement() {\n this.enumTypes = new LinkedList();\n }\n\n public void addEnumType(StringElement enumType) {\n this.enumTypes.add(enumType);\n }\n\n public List getEnumTypes() {\n return this.enumTypes;\n }\n}\n", "cal10n-ant-task/src/main/java/ch/qos/cal10n/ant/StringElement.java": "/*\n * Copyright (c) 2009 QOS.ch All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\npackage ch.qos.cal10n.ant;\n\nimport org.apache.tools.ant.Task;\n\npublic class StringElement extends Task {\n private String str;\n\n public StringElement() {}\n\n public StringElement(String str) {\n this.str = str;\n }\n\n public void addText(String str) {\n this.str = this.getProject().replaceProperties(str);\n }\n\n public String getText() {\n return this.str;\n }\n\n @Override\n public String toString() {\n return this.str;\n }\n}\n", "cal10n-ant-task/src/main/java/ch/qos/cal10n/ant/VerifyTask.java": "/*\n * Copyright (c) 2009 QOS.ch All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\npackage ch.qos.cal10n.ant;\n\nimport ch.qos.cal10n.CAL10NConstants;\nimport ch.qos.cal10n.verifier.IMessageKeyVerifier;\nimport org.apache.tools.ant.BuildException;\nimport org.apache.tools.ant.Task;\nimport org.apache.tools.ant.types.LogLevel;\nimport org.apache.tools.ant.types.Path;\nimport org.apache.tools.ant.types.Reference;\nimport org.apache.tools.ant.util.ClasspathUtils;\n\nimport java.lang.reflect.Constructor;\nimport java.text.MessageFormat;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Locale;\n\npublic class VerifyTask extends Task {\n private List enumTypes;\n private Path classpath;\n\n @Override\n public void init() throws BuildException {\n this.enumTypes = new LinkedList();\n }\n\n @Override\n public void execute() throws BuildException {\n if (this.enumTypes.isEmpty()) {\n throw new BuildException(CAL10NConstants.MISSING_ENUM_TYPES_MSG);\n }\n for (StringElement enumType : this.enumTypes) {\n IMessageKeyVerifier imcv = getMessageKeyVerifierInstance(enumType.getText());\n log(\"Checking all resource bundles for enum type [\" + enumType + \"]\", LogLevel.INFO.getLevel());\n checkAllLocales(imcv);\n }\n }\n\n public void checkAllLocales(IMessageKeyVerifier mcv) {\n String enumClassAsStr = mcv.getEnumTypeAsStr();\n String[] localeNameArray = mcv.getLocaleNames();\n\n if (localeNameArray == null || localeNameArray.length == 0) {\n String errMsg = MessageFormat.format(CAL10NConstants.MISSING_LOCALE_DATA_ANNOTATION_MESSAGE, enumClassAsStr);\n log(errMsg, LogLevel.ERR.getLevel());\n throw new BuildException(errMsg);\n }\n\n boolean failure = false;\n for (String localeName : localeNameArray) {\n Locale locale = new Locale(localeName);\n List errorList = 
mcv.typeIsolatedVerify(locale);\n if (errorList.size() == 0) {\n String resourceBundleName = mcv.getBaseName();\n log(\"SUCCESSFUL verification for resource bundle [\" + resourceBundleName + \"] for locale [\" + locale + \"]\", LogLevel.INFO.getLevel());\n } else {\n failure = true;\n log(\"FAILURE during verification of resource bundle for locale [\"\n + locale + \"] enum class [\" + enumClassAsStr + \"]\", LogLevel.ERR.getLevel());\n for (String error : errorList) {\n log(error, LogLevel.ERR.getLevel());\n }\n }\n }\n if (failure) {\n throw new BuildException(\"FAIL Verification of [\" + enumClassAsStr + \"] keys.\");\n }\n }\n\n IMessageKeyVerifier getMessageKeyVerifierInstance(String enumClassAsStr) {\n String errMsg = \"Failed to instantiate MessageKeyVerifier class\";\n try {\n ClassLoader classLoader = ClasspathUtils.getClassLoaderForPath(this.getProject(), this.classpath, \"cal10n.VerifyTask\");\n Class> mkvClass = Class.forName(\n CAL10NConstants.MessageKeyVerifier_FQCN, true, classLoader);\n Constructor> mkvCons = mkvClass.getConstructor(String.class);\n return (IMessageKeyVerifier) mkvCons.newInstance(enumClassAsStr);\n } catch (ClassNotFoundException e) {\n throw new BuildException(errMsg, e);\n } catch (NoClassDefFoundError e) {\n throw new BuildException(errMsg, e);\n } catch (Exception e) {\n throw new BuildException(errMsg, e);\n }\n }\n\n public void addClasspath(Path classpath) {\n this.classpath = classpath;\n }\n\n public void addConfiguredEnumTypes(EnumTypesElement enumTypes) {\n this.enumTypes.addAll(enumTypes.getEnumTypes());\n }\n\n public void setClasspath(Path classpath) {\n this.classpath = classpath;\n }\n\n public void setClasspathRef(Reference refId) {\n Path cp = new Path(this.getProject());\n cp.setRefid(refId);\n this.setClasspath(cp);\n }\n\n public void setEnumType(String enumType) {\n this.enumTypes.add(new StringElement(enumType));\n }\n}\n", "cal10n-ant-task/src/test/java/ch/qos/cal10n/ant/VerifyTaskTest.java": "/*\n * 
Copyright (c) 2009 QOS.ch All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\npackage ch.qos.cal10n.ant;\n\nimport junit.framework.TestSuite;\nimport org.apache.ant.antunit.junit3.AntUnitSuite;\nimport org.apache.ant.antunit.junit4.AntUnitSuiteRunner;\nimport org.junit.runner.RunWith;\n\nimport java.io.File;\nimport java.io.UnsupportedEncodingException;\nimport java.net.URISyntaxException;\nimport java.net.URL;\nimport java.net.URLDecoder;\n\n@RunWith(AntUnitSuiteRunner.class)\npublic class VerifyTaskTest {\n public static TestSuite suite() throws URISyntaxException {\n setProperties();\n URL resource = VerifyTask.class.getResource(\"/ch/qos/cal10n/ant/VerifyTaskTest.xml\");\n File file = new File(resource.toURI());\n return new AntUnitSuite(file, VerifyTask.class);\n }\n\n /**\n * Set system properties for use in the AntUnit files.\n */\n private static void setProperties() {\n String name = 
VerifyTask.class.getName();\n final String resourceName = \"/\" + name.replace('.', '/') + \".class\";\n String absoluteFilePath = VerifyTask.class.getResource(resourceName).getFile();\n try {\n absoluteFilePath = URLDecoder.decode(absoluteFilePath, \"UTF-8\");\n } catch (UnsupportedEncodingException e) {\n throw new RuntimeException(\"Missing UTF-8 encoding in JVM.\", e);\n }\n String classesDir = absoluteFilePath.substring(0, absoluteFilePath.length() - resourceName.length());\n System.setProperty(\"ch.qos.cal10n.ant.VerifyTaskTest.classes.dir\", classesDir);\n }\n}\n", "cal10n-ant-task/src/test/java/ch/qos/cal10n/ant/testdata/Colors.java": "/*\n * Copyright (c) 2009 QOS.ch All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\npackage ch.qos.cal10n.ant.testdata;\n\nimport ch.qos.cal10n.BaseName;\nimport ch.qos.cal10n.Locale;\nimport ch.qos.cal10n.LocaleData;\n\n@BaseName(\"colors\")\n@LocaleData({\n @Locale(\"en\"),\n @Locale(\"fr\")\n})\npublic enum Colors {\n RED,\n BLUE\n}\n", "cal10n-ant-task/src/test/java/ch/qos/cal10n/ant/testdata/DaysOfTheWeek.java": "package ch.qos.cal10n.ant.testdata;\n\nimport ch.qos.cal10n.BaseName;\nimport ch.qos.cal10n.Locale;\nimport ch.qos.cal10n.LocaleData;\n\n@BaseName(\"daysoftheweek\")\n@LocaleData({\n @Locale(\"en\"),\n @Locale(\"fr\")\n})\npublic enum DaysOfTheWeek {\n MONDAY,\n TUESDAY,\n WEDNESDAY,\n THURSDAY,\n FRIDAY,\n SATURDAY,\n SUNDAY\n}\n"}}
-{"repo": "scribtex/clsi", "pr_number": 1, "title": "Update readme and config", "state": "open", "merged_at": null, "additions": 2, "deletions": 2, "files_changed": ["app/models/compile.rb"], "files_before": {"app/models/compile.rb": "class Compile\n attr_accessor :token, :user,\n :root_resource_path, :resources, \n :compiler, :output_format\n attr_reader :output_files, :log_files, :unique_id,\n :status, :error_type, :error_message,\n :bibtex_ran, :makeindex_ran\n\n POSSIBLE_COMPILER_OUTPUT_FORMATS = {\n :pdflatex => ['pdf'],\n :latex => ['dvi', 'pdf', 'ps'],\n :xelatex => ['pdf']\n }\n\n def compiler\n @compiler ||= 'pdflatex'\n end\n\n def output_format\n @output_format ||= 'pdf'\n end\n\n def initialize(attributes = {})\n self.root_resource_path = attributes[:root_resource_path] || \"main.tex\"\n self.token = attributes[:token]\n \n self.compiler = attributes[:compiler]\n self.output_format = attributes[:output_format]\n\n self.resources = []\n for resource in attributes[:resources].to_a\n self.resources << Resource.new(\n resource[:path],\n resource[:modified_date],\n resource[:content],\n resource[:url],\n self\n )\n end\n \n @output_files = []\n @log_files = []\n @status = :unprocessed\n @bibtex_ran = false\n @makeindex_ran = false\n end\n\n def compile\n @start_time = Time.now\n validate_compile\n write_resources_to_disk\n do_compile\n convert_to_output_format\n move_compiled_files_to_public_dir\n @status = :success\n rescue CLSI::CompileError => e\n @status = :failure\n @error_type = e.class.name.demodulize\n @error_message = e.message\n ensure\n move_log_files_to_public_dir\n write_response_to_public_dir\n remove_compile_directory unless PRESERVE_COMPILE_DIRECTORIES\n record_in_compile_log\n end\n\n def validate_compile\n if self.user.blank?\n self.user = User.find_by_token(self.token)\n raise CLSI::InvalidToken, 'user does not exist' if self.user.nil?\n end\n \n unless POSSIBLE_COMPILER_OUTPUT_FORMATS.has_key?(self.compiler.to_sym)\n raise 
CLSI::UnknownCompiler, \"#{self.compiler} is not a valid compiler\"\n end\n \n unless POSSIBLE_COMPILER_OUTPUT_FORMATS[self.compiler.to_sym].include?(self.output_format)\n raise CLSI::ImpossibleOutputFormat, \"#{self.compiler} cannot produce #{self.output_format} output\"\n end\n end\n \n def unique_id\n @unique_id ||= generate_unique_string\n end\n \n def compile_directory\n @compile_directory ||= File.join(LATEX_COMPILE_DIR, self.unique_id)\n end\n \n def to_xml\n xml = Builder::XmlMarkup.new\n xml.instruct!\n\n xml.compile do\n xml.compile_id(self.unique_id)\n \n if self.status == :failure\n xml.status('failure')\n xml.error do\n xml.type self.error_type\n xml.message self.error_message\n end\n else\n xml.status(self.status.to_s)\n end\n \n unless self.output_files.empty?\n xml.output do\n for file in self.output_files\n xml.file(:url => file.url, :type => file.type, :mimetype => file.mimetype)\n end\n end\n end\n \n unless self.log_files.empty?\n xml.logs do\n for file in self.log_files\n xml.file(:url => file.url, :type => file.type, :mimetype => file.mimetype)\n end\n end\n end\n end\n end\n \n def to_json\n hash = {\n 'compile_id' => self.unique_id,\n 'status' => self.status.to_s\n }\n \n if self.status == :failure\n hash['error'] = {\n 'type' => self.error_type,\n 'message' => self.error_message\n }\n end\n \n unless self.output_files.empty?\n hash['output_files'] = self.output_files.collect{|of| {\n 'url' => of.url,\n 'mimetype' => of.mimetype,\n 'type' => of.type\n }}\n end\n \n unless self.log_files.empty?\n hash['logs'] = self.log_files.collect{|lf| {\n 'url' => lf.url,\n 'mimetype' => lf.mimetype,\n 'type' => lf.type\n }}\n end\n \n return ({'compile' => hash}).to_json\n end\n\nprivate\n\n def write_resources_to_disk\n File.umask(0002)\n \n for resource in self.resources.to_a\n resource.write_to_disk\n end\n end\n\n def do_compile\n run_compiler\n \n aux_file_content = read_aux_files\n if aux_file_content.include? 
'\\\\citation' or aux_file_content.include? '\\\\bibdata' or aux_file_content.include? '\\\\bibstyle'\n modify_aux_files\n run_bibtex\n run_latex_again = true\n @bibtex_ran = true\n end\n \n if File.exist?(File.join(compile_directory, 'output.idx'))\n run_makeindex\n run_latex_again = true\n @makeindex_ran = true\n end\n\n if File.exist?(File.join(compile_directory, 'output.toc'))\n # We have a table of contents that needs to be included\n run_latex_again = true\n end\n \n if log_complains_about_references? or run_latex_again\n run_compiler\n end\n \n if log_complains_about_references?\n run_compiler\n end\n\n if log_complains_about_references?\n run_compiler\n end\n end\n\n def log_complains_about_references?\n log_content = read_log\n log_content.include?('There were undefined references') ||\n log_content.include?('There were undefined citations') ||\n log_content.include?('LaTeX Warning: Label(s) may have changed. Rerun to get cross-references right.') ||\n log_content.include?('LaTeX Warning: Citation') || # Natbib\n log_content.include?('No file output.toc') ||\n log_content.include?('Rerun LaTeX') # The longtables package\n end\n \n def run_bibtex\n bibtex_command = ['env', tex_env_variables, BIBTEX_COMMAND, \"#{compile_directory_rel_to_chroot}/output\"].flatten\n run_with_timeout(bibtex_command, BIBTEX_TIMEOUT)\n end\n \n def run_makeindex\n makeindex_command = [\n MAKEINDEX_COMMAND,\n '-o', \"#{compile_directory_rel_to_chroot}/output.ind\",\n \"#{compile_directory_rel_to_chroot}/output.idx\"\n ]\n run_with_timeout(makeindex_command, COMPILE_TIMEOUT)\n end\n \n def run_compiler\n run_with_timeout(compile_command, COMPILE_TIMEOUT)\n end\n \n def read_aux_files\n aux_file_paths = Dir.entries(self.compile_directory).reject{|e| not e.match(/\\.aux$/)}\n aux_file_paths.collect!{|p| File.join(self.compile_directory, p)}\n return aux_file_paths.collect{|p| File.read(p)}.join(\"\\n\")\n end\n \n def modify_aux_files\n aux_file_names = 
Dir.entries(self.compile_directory).reject{|e| not e.match(/\\.aux$/)}\n aux_file_paths = aux_file_names.collect{|n| File.join(self.compile_directory, n)}\n for aux_file in aux_file_paths\n content = File.read(aux_file)\n content.gsub!(/^\\\\@input\\{(.*)\\}$/, \"\\\\@input{#{compile_directory_rel_to_chroot}/\\\\1}\")\n File.open(aux_file, 'w') {|f|\n f.write(content)\n }\n end\n end\n \n def read_log\n log_file_path = File.join(self.compile_directory, 'output.log')\n return '' unless File.exist?(log_file_path)\n File.read(log_file_path)\n end\n \n def convert_to_output_format\n case self.compiler\n when 'pdflatex'\n input_format = 'pdf'\n when 'latex'\n input_format = 'dvi'\n when 'xelatex'\n input_format = 'pdf'\n end\n ensure_output_files_exist(input_format)\n conversion_method = \"convert_#{input_format}_to_#{self.output_format}\"\n if self.respond_to?(conversion_method, true)\n self.send(conversion_method)\n else\n raise CLSI::ImpossibleFormatConversion, \"can't convert #{input_format} to #{self.output_format}\"\n end\n end\n\n def move_compiled_files_to_public_dir\n FileUtils.mkdir_p(File.join(SERVER_PUBLIC_DIR, 'output', self.unique_id))\n \n for existing_file in find_output_files_of_type(self.output_format)\n existing_path = File.join(compile_directory, existing_file)\n relative_output_path = File.join(relative_output_dir, existing_file)\n output_path = File.join(SERVER_PUBLIC_DIR, relative_output_path)\n FileUtils.mv(existing_path, output_path)\n @output_files << OutputFile.new(:path => relative_output_path)\n end\n end\n \n def move_log_files_to_public_dir\n FileUtils.mkdir_p(output_dir)\n \n existing_log_path = File.join(compile_directory, 'output.log')\n relative_output_log_path = File.join(relative_output_dir, 'output.log')\n if File.exist?(existing_log_path)\n FileUtils.mv(existing_log_path, File.join(SERVER_PUBLIC_DIR, relative_output_log_path))\n @log_files << OutputFile.new(:path => relative_output_log_path)\n end\n end\n \n def 
remove_compile_directory\n FileUtils.rm_rf(self.compile_directory)\n end\n \n def write_response_to_public_dir\n FileUtils.mkdir_p(output_dir)\n File.open(File.join(output_dir, 'response.xml'), 'w') do |f|\n f.write(self.to_xml)\n end\n File.open(File.join(output_dir, 'response.json'), 'w') do |f|\n f.write(self.to_json)\n end\n end\n \n def record_in_compile_log\n CompileLog.create(\n :user => user,\n :time_taken => ((Time.now.to_f - @start_time.to_f) * 1000).to_i, # Time in milliseconds\n :bibtex_ran => @bibtex_ran,\n :makeindex_ran => @makeindex_ran\n )\n end\n \n def tex_env_variables\n root_and_relative_directories = [\n File.join(compile_directory_rel_to_chroot, File.dirname(self.root_resource_path)),\n compile_directory_rel_to_chroot\n ].join(\":\") + \":\"\n return [\n \"TEXMFOUTPUT=#{compile_directory_rel_to_chroot}\",\n \"TEXINPUTS=#{root_and_relative_directories}\",\n \"BIBINPUTS=#{root_and_relative_directories}\",\n \"BSTINPUTS=#{root_and_relative_directories}\",\n \"TEXFONTS=#{root_and_relative_directories}\",\n \"TFMFONTS=#{root_and_relative_directories}\"\n ]\n end\n \n def output_dir\n File.join(SERVER_PUBLIC_DIR, relative_output_dir)\n end\n \n def relative_output_dir\n File.join('output', self.unique_id)\n end\n \n def compile_directory_rel_to_chroot\n @compile_directory_rel_to_chroot ||= File.join(LATEX_COMPILE_DIR_RELATIVE_TO_CHROOT, self.unique_id)\n end\n \n def compile_command\n case self.compiler\n when 'pdflatex'\n command = PDFLATEX_COMMAND\n when 'latex'\n command = LATEX_COMMAND\n when 'xelatex'\n command = XELATEX_COMMAND\n else\n raise NotImplemented # Previous checking means we should never get here!\n end\n return [\"env\"] + tex_env_variables + [command, \"-interaction=batchmode\",\n \"-output-directory=#{compile_directory_rel_to_chroot}\", \"-no-shell-escape\", \n \"-jobname=output\", self.root_resource_path]\n end\n \n # Returns a list of output files of the given type. 
Will raise a CLSI::NoOutputFile if no output\n # files of the given type exist.\n def find_output_files_of_type(type)\n file_name = \"output.#{type}\"\n output_path = File.join(compile_directory, file_name)\n raise CLSI::NoOutputProduced, 'no compiled documents were produced' unless File.exist?(output_path)\n return [file_name]\n end\n \n def ensure_output_files_exist(type)\n find_output_files_of_type(type)\n end\n \n def convert_pdf_to_pdf\n # Nothing to do!\n end\n \n def convert_dvi_to_dvi\n # Nothing to do! \n end\n \n def convert_dvi_to_pdf\n input = File.join(compile_directory_rel_to_chroot, 'output.dvi')\n output = File.join(compile_directory_rel_to_chroot, 'output.pdf')\n\n # Note: Adding &> /dev/null to this command makes run_with_timeout return straight away before\n # command is complete, and I have no idea why. Solution: Don't add it.\n dvipdf_command = \"env TEXPICTS=#{compile_directory_rel_to_chroot} #{DVIPDF_COMMAND} \\\"#{input}\\\" \\\"#{output}\\\"\"\n run_with_timeout(dvipdf_command, DVIPDF_TIMEOUT)\n end\n \n def convert_dvi_to_ps\n input = File.join(compile_directory_rel_to_chroot, 'output.dvi')\n output = File.join(compile_directory_rel_to_chroot, 'output.ps')\n dvips_command = \"env TEXPICTS=#{compile_directory_rel_to_chroot} #{DVIPS_COMMAND} -o \\\"#{output}\\\" \\\"#{input}\\\"\"\n run_with_timeout(dvips_command, DVIPS_TIMEOUT)\n end\n \n # Everything below here is copied from the mathwiki code. It was ugly when\n # I first wrote it and it hasn't improved with time. 
\n # Fixing it would be good.\n def run_with_timeout(command, timeout = 10)\n start_time = Time.now\n pid = fork {\n exec(*command)\n }\n while Time.now - start_time < timeout\n if Process.waitpid(pid, Process::WNOHANG)\n Rails.logger.info \"(#{Time.now - start_time} seconds) #{command.to_a.join(' ')}\"\n return pid\n end\n sleep 0.1 if (Time.now - start_time > 0.3) # No need to check too often if it's taking a while\n end\n \n # Process never finished\n kill_process(pid)\n raise CLSI::Timeout, \"the compile took too long to run and was aborted\"\n end\n \n def kill_process(pid)\n child_pids = %x[ps -e -o 'ppid pid' | awk '$1 == #{pid} { print $2 }'].split\n child_pids.collect{|cpid| kill_process(cpid.to_i)}\n Process.kill('INT', pid)\n Process.kill('HUP', pid)\n Process.kill('KILL', pid)\n end\nend\n"}, "files_after": {"app/models/compile.rb": "class Compile\n attr_accessor :token, :user,\n :root_resource_path, :resources, \n :compiler, :output_format\n attr_reader :output_files, :log_files, :unique_id,\n :status, :error_type, :error_message,\n :bibtex_ran, :makeindex_ran\n\n POSSIBLE_COMPILER_OUTPUT_FORMATS = {\n :pdflatex => ['pdf'],\n :latex => ['dvi', 'pdf', 'ps'],\n :xelatex => ['pdf']\n }\n\n def compiler\n @compiler ||= 'pdflatex'\n end\n\n def output_format\n @output_format ||= 'pdf'\n end\n\n def initialize(attributes = {})\n self.root_resource_path = attributes[:root_resource_path] || \"main.tex\"\n self.token = attributes[:token]\n \n self.compiler = attributes[:compiler]\n self.output_format = attributes[:output_format]\n\n self.resources = []\n for resource in attributes[:resources].to_a\n self.resources << Resource.new(\n resource[:path],\n resource[:modified_date],\n resource[:content],\n resource[:url],\n self\n )\n end\n \n @output_files = []\n @log_files = []\n @status = :unprocessed\n @bibtex_ran = false\n @makeindex_ran = false\n end\n\n def compile\n @start_time = Time.now\n validate_compile\n write_resources_to_disk\n do_compile\n 
convert_to_output_format\n move_compiled_files_to_public_dir\n @status = :success\n rescue CLSI::CompileError => e\n @status = :failure\n @error_type = e.class.name.demodulize\n @error_message = e.message\n ensure\n move_log_files_to_public_dir\n write_response_to_public_dir\n remove_compile_directory unless PRESERVE_COMPILE_DIRECTORIES\n record_in_compile_log\n end\n\n def validate_compile\n if self.user.blank?\n self.user = User.find_by_token(self.token)\n raise CLSI::InvalidToken, 'user does not exist' if self.user.nil?\n end\n \n unless POSSIBLE_COMPILER_OUTPUT_FORMATS.has_key?(self.compiler.to_sym)\n raise CLSI::UnknownCompiler, \"#{self.compiler} is not a valid compiler\"\n end\n \n unless POSSIBLE_COMPILER_OUTPUT_FORMATS[self.compiler.to_sym].include?(self.output_format)\n raise CLSI::ImpossibleOutputFormat, \"#{self.compiler} cannot produce #{self.output_format} output\"\n end\n end\n \n def unique_id\n @unique_id ||= generate_unique_string\n end\n \n def compile_directory\n @compile_directory ||= File.join(LATEX_COMPILE_DIR, self.unique_id)\n end\n \n def to_xml\n xml = Builder::XmlMarkup.new\n xml.instruct!\n\n xml.compile do\n xml.compile_id(self.unique_id)\n \n if self.status == :failure\n xml.status('failure')\n xml.error do\n xml.type self.error_type\n xml.message self.error_message\n end\n else\n xml.status(self.status.to_s)\n end\n \n unless self.output_files.empty?\n xml.output do\n for file in self.output_files\n xml.file(:url => file.url, :type => file.type, :mimetype => file.mimetype)\n end\n end\n end\n \n unless self.log_files.empty?\n xml.logs do\n for file in self.log_files\n xml.file(:url => file.url, :type => file.type, :mimetype => file.mimetype)\n end\n end\n end\n end\n end\n \n def to_json\n hash = {\n 'compile_id' => self.unique_id,\n 'status' => self.status.to_s\n }\n \n if self.status == :failure\n hash['error'] = {\n 'type' => self.error_type,\n 'message' => self.error_message\n }\n end\n \n unless self.output_files.empty?\n 
hash['output_files'] = self.output_files.collect{|of| {\n 'url' => of.url,\n 'mimetype' => of.mimetype,\n 'type' => of.type\n }}\n end\n \n unless self.log_files.empty?\n hash['logs'] = self.log_files.collect{|lf| {\n 'url' => lf.url,\n 'mimetype' => lf.mimetype,\n 'type' => lf.type\n }}\n end\n \n return ({'compile' => hash}).to_json\n end\n\nprivate\n\n def write_resources_to_disk\n File.umask(0002)\n \n for resource in self.resources.to_a\n resource.write_to_disk\n end\n end\n\n def do_compile\n run_compiler\n \n aux_file_content = read_aux_files\n if aux_file_content.include? '\\\\citation' or aux_file_content.include? '\\\\bibdata' or aux_file_content.include? '\\\\bibstyle'\n modify_aux_files\n run_bibtex\n run_latex_again = true\n @bibtex_ran = true\n end\n \n if File.exist?(File.join(compile_directory, 'output.idx'))\n run_makeindex\n run_latex_again = true\n @makeindex_ran = true\n end\n\n if File.exist?(File.join(compile_directory, 'output.toc'))\n # We have a table of contents that needs to be included\n run_latex_again = true\n end\n \n if log_complains_about_references? or run_latex_again\n run_compiler\n end\n \n if log_complains_about_references?\n run_compiler\n end\n\n if log_complains_about_references?\n run_compiler\n end\n end\n\n def log_complains_about_references?\n log_content = read_log\n log_content.include?('There were undefined references') ||\n log_content.include?('There were undefined citations') ||\n log_content.include?('LaTeX Warning: Label(s) may have changed. 
Rerun to get cross-references right.') ||\n log_content.include?('LaTeX Warning: Citation') || # Natbib\n log_content.include?('No file output.toc') ||\n log_content.include?('Rerun LaTeX') # The longtables package\n end\n \n def run_bibtex\n bibtex_command = ['env', tex_env_variables, BIBTEX_COMMAND, \"#{compile_directory_rel_to_chroot}/output\"].flatten\n run_with_timeout(bibtex_command, BIBTEX_TIMEOUT)\n end\n \n def run_makeindex\n makeindex_command = [\n MAKEINDEX_COMMAND,\n '-o', \"#{compile_directory_rel_to_chroot}/output.ind\",\n \"#{compile_directory_rel_to_chroot}/output.idx\"\n ]\n run_with_timeout(makeindex_command, COMPILE_TIMEOUT)\n end\n \n def run_compiler\n run_with_timeout(compile_command, COMPILE_TIMEOUT)\n end\n \n def read_aux_files\n aux_file_paths = Dir.entries(self.compile_directory).reject{|e| not e.match(/\\.aux$/)}\n aux_file_paths.collect!{|p| File.join(self.compile_directory, p)}\n return aux_file_paths.collect{|p| File.read(p)}.join(\"\\n\")\n end\n \n def modify_aux_files\n aux_file_names = Dir.entries(self.compile_directory).reject{|e| not e.match(/\\.aux$/)}\n aux_file_paths = aux_file_names.collect{|n| File.join(self.compile_directory, n)}\n for aux_file in aux_file_paths\n content = File.read(aux_file)\n content.gsub!(/^\\\\@input\\{(.*)\\}$/, \"\\\\@input{#{compile_directory_rel_to_chroot}/\\\\1}\")\n File.open(aux_file, 'w') {|f|\n f.write(content)\n }\n end\n end\n \n def read_log\n log_file_path = File.join(self.compile_directory, 'output.log')\n return '' unless File.exist?(log_file_path)\n File.read(log_file_path)\n end\n \n def convert_to_output_format\n case self.compiler\n when 'pdflatex'\n input_format = 'pdf'\n when 'latex'\n input_format = 'dvi'\n when 'xelatex'\n input_format = 'pdf'\n end\n ensure_output_files_exist(input_format)\n conversion_method = \"convert_#{input_format}_to_#{self.output_format}\"\n if self.respond_to?(conversion_method, true)\n self.send(conversion_method)\n else\n raise 
CLSI::ImpossibleFormatConversion, \"can't convert #{input_format} to #{self.output_format}\"\n end\n end\n\n def move_compiled_files_to_public_dir\n FileUtils.mkdir_p(File.join(SERVER_PUBLIC_DIR, 'output', self.unique_id))\n \n for existing_file in find_output_files_of_type(self.output_format)\n existing_path = File.join(compile_directory, existing_file)\n relative_output_path = File.join(relative_output_dir, existing_file)\n output_path = File.join(SERVER_PUBLIC_DIR, relative_output_path)\n FileUtils.mv(existing_path, output_path)\n @output_files << OutputFile.new(:path => relative_output_path)\n end\n end\n \n def move_log_files_to_public_dir\n FileUtils.mkdir_p(output_dir)\n \n existing_log_path = File.join(compile_directory, 'output.log')\n relative_output_log_path = File.join(relative_output_dir, 'output.log')\n if File.exist?(existing_log_path)\n FileUtils.mv(existing_log_path, File.join(SERVER_PUBLIC_DIR, relative_output_log_path))\n @log_files << OutputFile.new(:path => relative_output_log_path)\n end\n end\n \n def remove_compile_directory\n FileUtils.rm_rf(self.compile_directory)\n end\n \n def write_response_to_public_dir\n FileUtils.mkdir_p(output_dir)\n File.open(File.join(output_dir, 'response.xml'), 'w') do |f|\n f.write(self.to_xml)\n end\n File.open(File.join(output_dir, 'response.json'), 'w') do |f|\n f.write(self.to_json)\n end\n end\n \n def record_in_compile_log\n CompileLog.create(\n :user => user,\n :time_taken => ((Time.now.to_f - @start_time.to_f) * 1000).to_i, # Time in milliseconds\n :bibtex_ran => @bibtex_ran,\n :makeindex_ran => @makeindex_ran\n )\n end\n \n def tex_env_variables\n root_and_relative_directories = [\n File.join(compile_directory_rel_to_chroot, File.dirname(self.root_resource_path)),\n compile_directory_rel_to_chroot\n ].join(\":\") + \":\"\n return [\n \"TEXMFOUTPUT=#{compile_directory_rel_to_chroot}\",\n \"TEXINPUTS=#{root_and_relative_directories}\",\n \"BIBINPUTS=#{root_and_relative_directories}\",\n 
\"BSTINPUTS=#{root_and_relative_directories}\",\n \"TEXFONTS=#{root_and_relative_directories}\",\n \"TFMFONTS=#{root_and_relative_directories}\"\n ]\n end\n \n def output_dir\n File.join(SERVER_PUBLIC_DIR, relative_output_dir)\n end\n \n def relative_output_dir\n File.join('output', self.unique_id)\n end\n \n def compile_directory_rel_to_chroot\n @compile_directory_rel_to_chroot ||= File.join(LATEX_COMPILE_DIR_RELATIVE_TO_CHROOT, self.unique_id)\n end\n \n def compile_command\n case self.compiler\n when 'pdflatex'\n command = PDFLATEX_COMMAND\n when 'latex'\n command = LATEX_COMMAND\n when 'xelatex'\n command = XELATEX_COMMAND\n else\n raise NotImplemented # Previous checking means we should never get here!\n end\n return [\"env\"] + tex_env_variables + [command, \"-interaction=batchmode\",\n \"-output-directory=#{compile_directory_rel_to_chroot}\", \"-no-shell-escape\", \n \"-jobname=output\", self.root_resource_path]\n end\n \n # Returns a list of output files of the given type. Will raise a CLSI::NoOutputFile if no output\n # files of the given type exist.\n def find_output_files_of_type(type)\n file_name = \"output.#{type}\"\n output_path = File.join(compile_directory, file_name)\n raise CLSI::NoOutputProduced, 'no compiled documents were produced' unless File.exist?(output_path)\n return [file_name]\n end\n \n def ensure_output_files_exist(type)\n find_output_files_of_type(type)\n end\n \n def convert_pdf_to_pdf\n # Nothing to do!\n end\n \n def convert_dvi_to_dvi\n # Nothing to do! \n end\n \n def convert_dvi_to_pdf\n input = File.join(compile_directory_rel_to_chroot, 'output.dvi')\n output = File.join(compile_directory_rel_to_chroot, 'output.pdf')\n\n # Note: Adding &> /dev/null to this command makes run_with_timeout return straight away before\n # command is complete, and I have no idea why. 
Solution: Don't add it.\n dvipdf_command = \"env TEXPICTS=#{compile_directory_rel_to_chroot} #{DVIPDF_COMMAND} -o \\\"#{output}\\\" \\\"#{input}\\\"\" \n run_with_timeout(dvipdf_command, DVIPDF_TIMEOUT)\n end\n \n def convert_dvi_to_ps\n input = File.join(compile_directory_rel_to_chroot, 'output.dvi')\n output = File.join(compile_directory_rel_to_chroot, 'output.ps')\n dvips_command = \"env TEXPICTS=#{compile_directory_rel_to_chroot} #{DVIPS_COMMAND} -o \\\"#{output}\\\" \\\"#{input}\\\"\"\n run_with_timeout(dvips_command, DVIPS_TIMEOUT)\n end\n \n # Everything below here is copied from the mathwiki code. It was ugly when\n # I first wrote it and it hasn't improved with time. \n # Fixing it would be good.\n def run_with_timeout(command, timeout = 10)\n start_time = Time.now\n pid = fork {\n exec(*command)\n }\n while Time.now - start_time < timeout\n if Process.waitpid(pid, Process::WNOHANG)\n Rails.logger.info \"(#{Time.now - start_time} seconds) #{command.to_a.join(' ')}\"\n return pid\n end\n sleep 0.1 if (Time.now - start_time > 0.3) # No need to check too often if it's taking a while\n end\n \n # Process never finished\n kill_process(pid)\n raise CLSI::Timeout, \"the compile took too long to run and was aborted\"\n end\n \n def kill_process(pid)\n child_pids = %x[ps -e -o 'ppid pid' | awk '$1 == #{pid} { print $2 }'].split\n child_pids.collect{|cpid| kill_process(cpid.to_i)}\n Process.kill('INT', pid)\n Process.kill('HUP', pid)\n Process.kill('KILL', pid)\n end\nend\n"}}
-{"repo": "faustocintra/GExtenso", "pr_number": 3, "title": "Class to Module + upgrades", "state": "open", "merged_at": null, "additions": 211, "deletions": 27, "files_changed": ["GExtenso.rb"], "files_before": {"GExtenso.rb": "#!/usr/bin/env ruby\n\n##############################################################################################################\n# ATEN\u00c7\u00c3O: Este \u00e9 o meu primeiro trabalho na linguagem Ruby. A l\u00f3gica foi originalmente desenvolvida em PHP; #\n# portanto, o estilo do c\u00f3digo pode n\u00e3o agradar programadores Ruby experientes. Estou aberto a cr\u00edticas #\n# construtivas e sugest\u00f5es, para melhorar meu conhecimento na linguagem. #\n##############################################################################################################\n\n# GExtenso class file\n#\n# author Fausto Gon\u00e7alves Cintra (goncin) \n# link http://goncin.wordpress.com\n# link http://twitter.com/g0nc1n\n# license http://creativecommons.org/licenses/LGPL/2.1/deed.pt\n#\n\n# GExtenso \u00e9 uma classe que gera a representa\u00e7\u00e3o por extenso de um n\u00famero ou valor monet\u00e1rio.\n#\n# ATEN\u00c7\u00c3O: A P\u00c1GINA DE C\u00d3DIGO DESTE ARQUIVO \u00c9 UTF-8 (Unicode)!\n# \n# Sua implementa\u00e7\u00e3o foi feita como prova de conceito, utilizando:\n# * M\u00e9todos est\u00e1ticos, implementando o padr\u00e3o de projeto (\"design pattern\") SINGLETON;\n# * Chamadas recursivas a m\u00e9todos, minimizando repeti\u00e7\u00f5es e mantendo o c\u00f3digo enxuto; e\n# * Tratamento de erros por interm\u00e9dio de exce\u00e7\u00f5es.\n#\n# = EXEMPLOS DE USO =\n#\n# Para obter o extenso de um n\u00famero, utilize GExtenso.numero.\n# \n# puts GExtenso.numero(832); # oitocentos e trinta e dois\n# puts GExtenso.numero(832, GExtenso::GENERO_FEM) # oitocentas e trinta e duas\n# \n#\n# Para obter o extenso de um valor monet\u00e1rio, utilize GExtenso.moeda.\n# \n# # IMPORTANTE: veja nota sobre o par\u00e2metro 'valor' na 
documenta\u00e7\u00e3o do m\u00e9todo!\n#\n# puts GExtenso.moeda(15402) # cento e cinquenta e quatro reais e dois centavos\n#\n# puts GExtenso.moeda(47) # quarenta e sete centavos\n#\n# puts GExtenso.moeda(357082, 2,\n# ['peseta', 'pesetas', GExtenso::GENERO_FEM],\n# ['c\u00eantimo', 'c\u00eantimos', GExtenso::GENERO_MASC])\n# # tr\u00eas mil, quinhentas e setenta pesetas e oitenta e dois c\u00eantimos\n#\n# author Fausto Gon\u00e7alves Cintra (goncin) \n# version 0.1 2010-06-10\n \nclass GExtenso\n \n NUM_SING = 0\n NUM_PLURAL = 1\n POS_GENERO = 2\n GENERO_MASC = 0\n GENERO_FEM = 1\n \n VALOR_MAXIMO = 999999999\n \n # As unidades 1 e 2 variam em g\u00eanero, pelo que precisamos de dois conjuntos de strings (masculinas e femininas) para as unidades\n UNIDADES = {\n GENERO_MASC => {\n 1 => 'um',\n 2 => 'dois',\n 3 => 'tr\u00eas',\n 4 => 'quatro',\n 5 => 'cinco',\n 6 => 'seis',\n 7 => 'sete',\n 8 => 'oito',\n 9 => 'nove'\n },\n GENERO_FEM => {\n 1 => 'uma',\n 2 => 'duas',\n 3 => 'tr\u00eas',\n 4 => 'quatro',\n 5 => 'cinco',\n 6 => 'seis',\n 7 => 'sete',\n 8 => 'oito',\n 9 => 'nove'\n }\n }\n \n DE11A19 = {\n 11 => 'onze',\n 12 => 'doze',\n 13 => 'treze',\n 14 => 'quatorze',\n 15 => 'quinze',\n 16 => 'dezesseis',\n 17 => 'dezessete',\n 18 => 'dezoito',\n 19 => 'dezenove'\n }\n \n DEZENAS = {\n 10 => 'dez',\n 20 => 'vinte',\n 30 => 'trinta',\n 40 => 'quarenta',\n 50 => 'cinquenta',\n 60 => 'sessenta',\n 70 => 'setenta',\n 80 => 'oitenta',\n 90 => 'noventa'\n }\n \n CENTENA_EXATA = 'cem'\n \n # As centenas, com exce\u00e7\u00e3o de 'cento', tamb\u00e9m variam em g\u00eanero. 
Aqui tamb\u00e9m se faz\n # necess\u00e1rio dois conjuntos de strings (masculinas e femininas).\n \n CENTENAS = {\n GENERO_MASC => {\n 100 => 'cento',\n 200 => 'duzentos',\n 300 => 'trezentos',\n 400 => 'quatrocentos',\n 500 => 'quinhentos',\n 600 => 'seiscentos',\n 700 => 'setecentos',\n 800 => 'oitocentos',\n 900 => 'novecentos'\n },\n GENERO_FEM => {\n 100 => 'cento',\n 200 => 'duzentas',\n 300 => 'trezentas',\n 400 => 'quatrocentas',\n 500 => 'quinhentas',\n 600 => 'seiscentas',\n 700 => 'setecentas',\n 800 => 'oitocentas',\n 900 => 'novecentas'\n }\n }\n \n #'Mil' \u00e9 invari\u00e1vel, seja em g\u00eanero, seja em n\u00famero\n MILHAR = 'mil'\n\n MILHOES = {\n NUM_SING => 'milh\u00e3o',\n NUM_PLURAL => 'milh\u00f5es'\n }\n\n UNIDADES_ORDINAL = {\n GENERO_MASC => {\n 1 => 'primeiro',\n 2 => 'segundo',\n 3 => 'terceiro',\n 4 => 'quarto',\n 5 => 'quinto',\n 6 => 'sexto',\n 7 => 's\u00e9timo',\n 8 => 'oitavo',\n 9 => 'nono'},\n GENERO_FEM => {\n 1 => 'primeira',\n 2 => 'segunda',\n 3 => 'terceira',\n 4 => 'quarta',\n 5 => 'quinta',\n 6 => 'sexta',\n 7 => 's\u00e9tima',\n 8 => 'oitava',\n 9 => 'nona'}}\n\n DEZENAS_ORDINAL = {\n GENERO_MASC => {\n 10 => 'd\u00e9cimo',\n 20 => 'vig\u00e9simo',\n 30 => 'trig\u00e9simo',\n 40 => 'quadrag\u00e9simo',\n 50 => 'quinquag\u00e9simo',\n 60 => 'sexag\u00e9simo',\n 70 => 'septuag\u00e9simo',\n 80 => 'octog\u00e9simo',\n 90 => 'nonag\u00e9simo'},\n GENERO_FEM => {\n 10 => 'd\u00e9cima',\n 20 => 'vig\u00e9sima',\n 30 => 'trig\u00e9sima',\n 40 => 'quadrag\u00e9sima',\n 50 => 'quinquag\u00e9sima',\n 60 => 'sexag\u00e9sima',\n 70 => 'septuag\u00e9sima',\n 80 => 'octog\u00e9sima',\n 90 => 'nonag\u00e9sima'}}\n \n CENTENAS_ORDINAL = {\n GENERO_MASC => {\n 100 => 'cent\u00e9simo',\n 200 => 'ducent\u00e9simo',\n 300 => 'trecent\u00e9simo',\n 400 => 'quadringent\u00e9simo',\n 500 => 'quingent\u00e9simo',\n 600 => 'seiscent\u00e9simo',\n 700 => 'septingent\u00e9simo',\n 800 => 'octingent\u00e9simo',\n 900 => 'noningent\u00e9simo'},\n 
GENERO_FEM => {\n 100 => 'cent\u00e9sima',\n 200 => 'ducent\u00e9sima',\n 300 => 'trecent\u00e9sima',\n 400 => 'quadringent\u00e9sima',\n 500 => 'quingent\u00e9sima',\n 600 => 'seiscent\u00e9sima',\n 700 => 'septingent\u00e9sima',\n 800 => 'octingent\u00e9sima',\n 900 => 'noningent\u00e9sima'}}\n \n \n MILHAR_ORDINAL = {\n GENERO_MASC => {\n 1000 => 'mil\u00e9simo'},\n GENERO_FEM =>{\n 1000 => 'mil\u00e9sima'}}\n \n def self.is_int(s)\n Integer(s) != nil rescue false\n end\n \n #######################################################################################################################################\n \n def self.numero (valor, genero = GENERO_MASC)\n\n # Gera a representa\u00e7\u00e3o por extenso de um n\u00famero inteiro, maior que zero e menor ou igual a VALOR_MAXIMO.\n #\n # PAR\u00c2METROS:\n # valor (Integer) O valor num\u00e9rico cujo extenso se deseja gerar\n #\n # genero (Integer) [Opcional; valor padr\u00e3o: GExtenso::GENERO_MASC] O g\u00eanero gramatical (GExtenso::GENERO_MASC ou GExtenso::GENERO_FEM)\n # do extenso a ser gerado. 
Isso possibilita distinguir, por exemplo, entre 'duzentos e dois homens' e 'duzentas e duas mulheres'.\n #\n # VALOR DE RETORNO:\n # (String) O n\u00famero por extenso\n \n # ----- VALIDA\u00c7\u00c3O DOS PAR\u00c2METROS DE ENTRADA ---- \n \n if !is_int(valor)\n raise \"[Exce\u00e7\u00e3o em GExtenso.numero] Par\u00e2metro 'valor' n\u00e3o \u00e9 num\u00e9rico (recebido: '#{valor}')\"\n elsif valor <= 0\n raise \"[Exce\u00e7\u00e3o em GExtenso.numero] Par\u00e2metro 'valor' igual a ou menor que zero (recebido: '#{valor}')\"\n elsif valor > VALOR_MAXIMO\n raise '[Exce\u00e7\u00e3o em GExtenso::numero] Par\u00e2metro ''valor'' deve ser um inteiro entre 1 e ' + VALOR_MAXIMO.to_s + \" (recebido: '#{valor}')\"\n elsif genero != GENERO_MASC && genero != GENERO_FEM\n raise \"Exce\u00e7\u00e3o em GExtenso: valor incorreto para o par\u00e2metro 'genero' (recebido: '#{genero}')\"\n\n # ------------------------------------------------\n\n elsif valor >= 1 && valor <= 9\n UNIDADES[genero][valor]\n \n elsif valor == 10\n DEZENAS[valor]\n\n elsif valor >= 11 && valor <= 19\n DE11A19[valor]\n \n elsif valor >= 20 && valor <= 99\n dezena = valor - (valor % 10)\n ret = DEZENAS[dezena]\n # Chamada recursiva \u00e0 fun\u00e7\u00e3o para processar resto se este for maior que zero.\n # O conectivo 'e' \u00e9 utilizado entre dezenas e unidades.\n resto = valor - dezena\n if resto > 0\n ret += ' e ' + self.numero(resto, genero)\n end\n ret\n\n elsif valor == 100 \n CENTENA_EXATA\n\n elsif valor >= 101 && valor <= 999\n centena = valor - (valor % 100)\n ret = CENTENAS[genero][centena] # As centenas (exceto 'cento') variam em g\u00eanero\n # Chamada recursiva \u00e0 fun\u00e7\u00e3o para processar resto se este for maior que zero.\n # O conectivo 'e' \u00e9 utilizado entre centenas e dezenas.\n resto = valor - centena \n if resto > 0\n ret += ' e ' + self.numero(resto, genero)\n end\n ret\n\n elsif valor >= 1000 && valor <= 999999\n # A fun\u00e7\u00e3o 'floor' \u00e9 utilizada para 
encontrar o inteiro da divis\u00e3o de valor por 1000,\n # assim determinando a quantidade de milhares. O resultado \u00e9 enviado a uma chamada recursiva\n # da fun\u00e7\u00e3o. A palavra 'mil' n\u00e3o se flexiona.\n milhar = (valor / 1000).floor\n ret = self.numero(milhar, GENERO_MASC) + ' ' + MILHAR # 'Mil' \u00e9 do g\u00eanero masculino\n resto = valor % 1000\n # Chamada recursiva \u00e0 fun\u00e7\u00e3o para processar resto se este for maior que zero.\n # O conectivo 'e' \u00e9 utilizado entre milhares e n\u00fameros entre 1 e 99, bem como antes de centenas exatas.\n if resto > 0 && ((resto >= 1 && resto <= 99) || resto % 100 == 0)\n ret += ' e ' + self.numero(resto, genero)\n # Nos demais casos, ap\u00f3s o milhar \u00e9 utilizada a v\u00edrgula.\n elsif (resto > 0)\n ret += ', ' + self.numero(resto, genero)\n end\n ret\n\n elsif valor >= 100000 && valor <= VALOR_MAXIMO\n # A fun\u00e7\u00e3o 'floor' \u00e9 utilizada para encontrar o inteiro da divis\u00e3o de valor por 1000000,\n # assim determinando a quantidade de milh\u00f5es. O resultado \u00e9 enviado a uma chamada recursiva\n # da fun\u00e7\u00e3o. A palavra 'milh\u00e3o' flexiona-se no plural.\n milhoes = (valor / 1000000).floor\n ret = self.numero(milhoes, GENERO_MASC) + ' ' # Milh\u00e3o e milh\u00f5es s\u00e3o do g\u00eanero masculino\n \n # Se a o n\u00famero de milh\u00f5es for maior que 1, deve-se utilizar a forma flexionada no plural\n ret += milhoes == 1 ? 
MILHOES[NUM_SING] : MILHOES[NUM_PLURAL]\n\n resto = valor % 1000000\n\n # Chamada recursiva \u00e0 fun\u00e7\u00e3o para processar resto se este for maior que zero.\n # O conectivo 'e' \u00e9 utilizado entre milh\u00f5es e n\u00fameros entre 1 e 99, bem como antes de centenas exatas.\n if resto && ((resto >= 1 && resto <= 99) || resto % 100 == 0)\n ret += ' e ' + ret.numero(resto, genero)\n # Nos demais casos, ap\u00f3s o milh\u00e3o \u00e9 utilizada a v\u00edrgula.\n elsif resto > 0\n ret += ', ' + self.numero(resto, genero)\n end\n ret\n\n end\n \n end\n \n #######################################################################################################################################\n \n def self.moeda(\n valor,\n casas_decimais = 2,\n info_unidade = ['real', 'reais', GENERO_MASC],\n info_fracao = ['centavo', 'centavos', GENERO_MASC]\n ) \n \n # Gera a representa\u00e7\u00e3o por extenso de um valor monet\u00e1rio, maior que zero e menor ou igual a GExtenso::VALOR_MAXIMO.\n #\n #\n # PAR\u00c2METROS:\n # valor (Integer) O valor monet\u00e1rio cujo extenso se deseja gerar.\n # ATEN\u00c7\u00c3O: PARA EVITAR OS CONHECIDOS PROBLEMAS DE ARREDONDAMENTO COM N\u00daMEROS DE PONTO FLUTUANTE, O VALOR DEVE SER PASSADO\n # J\u00c1 DEVIDAMENTE MULTIPLICADO POR 10 ELEVADO A $casasDecimais (o que equivale, normalmente, a passar o valor com centavos\n # multiplicado por 100)\n #\n # casas_decimais (Integer) [Opcional; valor padr\u00e3o: 2] N\u00famero de casas decimais a serem consideradas como parte fracion\u00e1ria (centavos)\n #\n # info_unidade (Array) [Opcional; valor padr\u00e3o: ['real', 'reais', GExtenso::GENERO_MASC]] Fornece informa\u00e7\u00f5es sobre a moeda a ser\n # utilizada. 
O primeiro valor da matriz corresponde ao nome da moeda no singular, o segundo ao nome da moeda no plural e o terceiro\n # ao g\u00eanero gramatical do nome da moeda (GExtenso::GENERO_MASC ou GExtenso::GENERO_FEM)\n #\n # info_fracao (Array) [Opcional; valor padr\u00e3o: ['centavo', 'centavos', GExtenso::GENERO_MASC]] Prov\u00ea informa\u00e7\u00f5es sobre a parte fracion\u00e1ria\n # da moeda. O primeiro valor da matriz corresponde ao nome da parte fracion\u00e1ria no singular, o segundo ao nome da parte fracion\u00e1ria no plural\n # e o terceiro ao g\u00eanero gramatical da parte fracion\u00e1ria (GExtenso::GENERO_MASC ou GExtenso::GENERO_FEM)\n #\n # VALOR DE RETORNO:\n # (String) O valor monet\u00e1rio por extenso\n \n # ----- VALIDA\u00c7\u00c3O DOS PAR\u00c2METROS DE ENTRADA ----\n\n if ! self.is_int(valor)\n raise \"[Exce\u00e7\u00e3o em GExtenso.moeda] Par\u00e2metro 'valor' n\u00e3o \u00e9 num\u00e9rico (recebido: '#{valor}')\"\n\n elsif valor <= 0\n raise \"[Exce\u00e7\u00e3o em GExtenso.moeda] Par\u00e2metro valor igual a ou menor que zero (recebido: '#{valor}')\"\n\n elsif ! self.is_int(casas_decimais) || casas_decimais < 0\n raise \"[Exce\u00e7\u00e3o em GExtenso.moeda] Par\u00e2metro 'casas_decimais' n\u00e3o \u00e9 num\u00e9rico ou \u00e9 menor que zero (recebido: '#{casas_decimais}')\"\n\n elsif info_unidade.class != Array || info_unidade.length < 3\n temp = info_unidade.class == Array ? '[' + info_unidade.join(', ') + ']' : \"'#{info_unidade}'\"\n raise \"[Exce\u00e7\u00e3o em GExtenso.moeda] Par\u00e2metro 'info_unidade' n\u00e3o \u00e9 uma matriz com 3 (tr\u00eas) elementos (recebido: #{temp})\"\n \n elsif info_unidade[POS_GENERO] != GENERO_MASC && info_unidade[POS_GENERO] != GENERO_FEM\n raise \"Exce\u00e7\u00e3o em GExtenso: valor incorreto para o par\u00e2metro 'info_unidade[POS_GENERO]' (recebido: '#{info_unidade[POS_GENERO]}')\"\n\n elsif info_fracao.class != Array || info_fracao.length < 3\n temp = info_fracao.class == Array ? 
'[' + info_fracao.join(', ') + ']' : \"'#{info_fracao}'\"\n raise \"[Exce\u00e7\u00e3o em GExtenso.moeda] Par\u00e2metro 'info_fracao' n\u00e3o \u00e9 uma matriz com 3 (tr\u00eas) elementos (recebido: #{temp})\"\n \n elsif info_fracao[POS_GENERO] != GENERO_MASC && info_fracao[POS_GENERO] != GENERO_FEM\n raise \"[Exce\u00e7\u00e3o em GExtenso.moeda] valor incorreto para o par\u00e2metro 'info_fracao[POS_GENERO]' (recebido: '#{info_fracao[POS_GENERO]}').\"\n \n end\n\n # -----------------------------------------------\n\n ret = ''\n\n # A parte inteira do valor monet\u00e1rio corresponde ao valor passado dividido por 10 elevado a casas_decimais, desprezado o resto.\n # Assim, com o padr\u00e3o de 2 casas_decimais, o valor ser\u00e1 dividido por 100 (10^2), e o resto \u00e9 descartado utilizando-se floor().\n parte_inteira = valor.floor / (10**casas_decimais)\n\n # A parte fracion\u00e1ria ('centavos'), por seu turno, corresponder\u00e1 ao resto da divis\u00e3o do valor por 10 elevado a casas_decimais.\n # No cen\u00e1rio comum em que trabalhamos com 2 casas_decimais, ser\u00e1 o resto da divis\u00e3o do valor por 100 (10^2).\n fracao = valor % (10**casas_decimais)\n\n # O extenso para a parte_inteira somente ser\u00e1 gerado se esta for maior que zero. Para tanto, utilizamos\n # os pr\u00e9stimos do m\u00e9todo GExtenso::numero().\n if parte_inteira > 0\n ret = self.numero(parte_inteira, info_unidade[POS_GENERO]) + ' '\n ret += parte_inteira == 1 ? info_unidade[NUM_SING] : info_unidade[NUM_PLURAL]\n end\n\n # De forma semelhante, o extenso da fracao somente ser\u00e1 gerado se esta for maior que zero. */\n if fracao > 0\n # Se a parte_inteira for maior que zero, o extenso para ela j\u00e1 ter\u00e1 sido gerado. Antes de juntar os\n # centavos, precisamos colocar o conectivo 'e'.\n if parte_inteira > 0\n ret += ' e '\n end\n ret += self.numero(fracao, info_fracao[POS_GENERO]) + ' '\n ret += parte_inteira == 1 ? 
info_fracao[NUM_SING] : info_fracao[NUM_PLURAL]\n end\n\n ret\n\n end\n\n ######################################################################################################################################################\n def self.ordinal (valor, genero = GENERO_MASC)\n\n # Gera a representa\u00e7\u00e3o ordinal de um n\u00famero inteiro de 1 \u00e0 1000\n\n # PAR\u00c2METROS:\n # valor (Integer) O valor num\u00e9rico cujo extenso se deseja gerar\n #\n # genero (Integer) [Opcional; valor padr\u00e3o: GExtenso::GENERO_MASC] O g\u00eanero gramatical (GExtenso::GENERO_MASC ou GExtenso::GENERO_FEM)\n # do extenso a ser gerado. Isso possibilita distinguir, por exemplo, entre 'duzentos e dois homens' e 'duzentas e duas mulheres'.\n #\n # VALOR DE RETORNO:\n # (String) O n\u00famero por extenso\n \n # ----- VALIDA\u00c7\u00c3O DOS PAR\u00c2METROS DE ENTRADA ---- \n \n if !is_int(valor)\n raise \"[Exce\u00e7\u00e3o em GExtenso.numero] Par\u00e2metro 'valor' n\u00e3o \u00e9 num\u00e9rico (recebido: '#{valor}')\"\n elsif valor <= 0\n raise \"[Exce\u00e7\u00e3o em GExtenso.numero] Par\u00e2metro 'valor' igual a ou menor que zero (recebido: '#{valor}')\"\n elsif valor > VALOR_MAXIMO\n raise '[Exce\u00e7\u00e3o em GExtenso::numero] Par\u00e2metro ''valor'' deve ser um inteiro entre 1 e ' + VALOR_MAXIMO.to_s + \" (recebido: '#{valor}')\"\n elsif genero != GENERO_MASC && genero != GENERO_FEM\n raise \"Exce\u00e7\u00e3o em GExtenso: valor incorreto para o par\u00e2metro 'genero' (recebido: '#{genero}')\"\n # ------------------------------------------------\n elsif valor >= 1 && valor <= 9\n return UNIDADES_ORDINAL[genero][valor]\n elsif valor >= 10 && valor <= 99\n dezena = valor - (valor % 10)\n resto = valor - dezena\n ret = DEZENAS_ORDINAL[genero][dezena]+\" \"\n if resto > 0 then ret+= self.ordinal(resto,genero); end\n return ret\n elsif valor >= 100 && valor <= 999\n centena = valor - (valor % 100)\n resto = valor - centena \n ret = CENTENAS_ORDINAL[genero][centena]+\" 
\"\n if resto > 0 then ret += self.ordinal(resto, genero); end\n return ret\n elsif valor == 1000\n return MILHAR_ORDINAL[genero][valor]+\" \"\n end\n end\n\n \nend \n"}, "files_after": {"GExtenso.rb": "#!/usr/bin/env ruby\n\n##############################################################################################################\n# ATEN\u00c7\u00c3O: Este \u00e9 o meu primeiro trabalho na linguagem Ruby. A l\u00f3gica foi originalmente desenvolvida em PHP; #\n# portanto, o estilo do c\u00f3digo pode n\u00e3o agradar programadores Ruby experientes. Estou aberto a cr\u00edticas #\n# construtivas e sugest\u00f5es, para melhorar meu conhecimento na linguagem. #\n##############################################################################################################\n\n# GExtenso class file\n#\n# author Fausto Gon\u00e7alves Cintra (goncin) \n# assitente Leonardo Ostan (lostan) \n# link http://goncin.wordpress.com\n# link http://twitter.com/g0nc1n\n# license http://creativecommons.org/licenses/LGPL/2.1/deed.pt\n#\n\n# GExtenso \u00e9 uma classe que gera a representa\u00e7\u00e3o por extenso de um n\u00famero ou valor monet\u00e1rio.\n#\n# ATEN\u00c7\u00c3O: A P\u00c1GINA DE C\u00d3DIGO DESTE ARQUIVO \u00c9 UTF-8 (Unicode)!\n# \n# Sua implementa\u00e7\u00e3o foi feita como prova de conceito, utilizando:\n# * M\u00e9todos est\u00e1ticos, implementando o padr\u00e3o de projeto (\"design pattern\") SINGLETON;\n# * Chamadas recursivas a m\u00e9todos, minimizando repeti\u00e7\u00f5es e mantendo o c\u00f3digo enxuto; e\n# * Tratamento de erros por interm\u00e9dio de exce\u00e7\u00f5es.\n#\n# = EXEMPLOS DE USO =\n#\n# Para obter o extenso de um n\u00famero, utilize GExtenso.numero.\n# \n# puts GExtenso.numero(832); # oitocentos e trinta e dois\n# puts GExtenso.numero(832, GExtenso::GENERO_FEM) # oitocentas e trinta e duas\n# \n#\n# Para obter o extenso de um valor monet\u00e1rio, utilize GExtenso.moeda.\n# \n# # IMPORTANTE: veja nota sobre o par\u00e2metro 'valor' 
na documenta\u00e7\u00e3o do m\u00e9todo!\n#\n# puts GExtenso.moeda(15402) # cento e cinquenta e quatro reais e dois centavos\n#\n# puts GExtenso.moeda(47) # quarenta e sete centavos\n#\n# puts GExtenso.moeda(357082, 2,\n# ['peseta', 'pesetas', GExtenso::GENERO_FEM],\n# ['c\u00eantimo', 'c\u00eantimos', GExtenso::GENERO_MASC])\n# # tr\u00eas mil, quinhentas e setenta pesetas e oitenta e dois c\u00eantimos\n#\n# author Fausto Gon\u00e7alves Cintra (goncin) \n# version 0.1 2010-06-10\n \nrequire 'date'\n\nmodule Extenso\n \n NUM_SING = 0\n NUM_PLURAL = 1\n POS_GENERO = 2\n GENERO_MASC = 0\n GENERO_FEM = 1\n \n VALOR_MAXIMO = 999999999\n \n # As unidades 1 e 2 variam em g\u00eanero, pelo que precisamos de dois conjuntos de strings (masculinas e femininas) para as unidades\n UNIDADES = {\n GENERO_MASC => {\n 1 => 'um',\n 2 => 'dois',\n 3 => 'tr\u00eas',\n 4 => 'quatro',\n 5 => 'cinco',\n 6 => 'seis',\n 7 => 'sete',\n 8 => 'oito',\n 9 => 'nove'\n },\n GENERO_FEM => {\n 1 => 'uma',\n 2 => 'duas',\n 3 => 'tr\u00eas',\n 4 => 'quatro',\n 5 => 'cinco',\n 6 => 'seis',\n 7 => 'sete',\n 8 => 'oito',\n 9 => 'nove'\n }\n }\n \n DE11A19 = {\n 11 => 'onze',\n 12 => 'doze',\n 13 => 'treze',\n 14 => 'quatorze',\n 15 => 'quinze',\n 16 => 'dezesseis',\n 17 => 'dezessete',\n 18 => 'dezoito',\n 19 => 'dezenove'\n }\n \n DEZENAS = {\n 10 => 'dez',\n 20 => 'vinte',\n 30 => 'trinta',\n 40 => 'quarenta',\n 50 => 'cinquenta',\n 60 => 'sessenta',\n 70 => 'setenta',\n 80 => 'oitenta',\n 90 => 'noventa'\n }\n \n CENTENA_EXATA = 'cem'\n \n # As centenas, com exce\u00e7\u00e3o de 'cento', tamb\u00e9m variam em g\u00eanero. 
Aqui tamb\u00e9m se faz\n # necess\u00e1rio dois conjuntos de strings (masculinas e femininas).\n \n CENTENAS = {\n GENERO_MASC => {\n 100 => 'cento',\n 200 => 'duzentos',\n 300 => 'trezentos',\n 400 => 'quatrocentos',\n 500 => 'quinhentos',\n 600 => 'seiscentos',\n 700 => 'setecentos',\n 800 => 'oitocentos',\n 900 => 'novecentos'\n },\n GENERO_FEM => {\n 100 => 'cento',\n 200 => 'duzentas',\n 300 => 'trezentas',\n 400 => 'quatrocentas',\n 500 => 'quinhentas',\n 600 => 'seiscentas',\n 700 => 'setecentas',\n 800 => 'oitocentas',\n 900 => 'novecentas'\n }\n }\n \n #'Mil' \u00e9 invari\u00e1vel, seja em g\u00eanero, seja em n\u00famero\n MILHAR = 'mil'\n\n MILHOES = {\n NUM_SING => 'milh\u00e3o',\n NUM_PLURAL => 'milh\u00f5es'\n }\n\n UNIDADES_ORDINAL = {\n GENERO_MASC => {\n 1 => 'primeiro',\n 2 => 'segundo',\n 3 => 'terceiro',\n 4 => 'quarto',\n 5 => 'quinto',\n 6 => 'sexto',\n 7 => 's\u00e9timo',\n 8 => 'oitavo',\n 9 => 'nono'},\n GENERO_FEM => {\n 1 => 'primeira',\n 2 => 'segunda',\n 3 => 'terceira',\n 4 => 'quarta',\n 5 => 'quinta',\n 6 => 'sexta',\n 7 => 's\u00e9tima',\n 8 => 'oitava',\n 9 => 'nona'}}\n\n DEZENAS_ORDINAL = {\n GENERO_MASC => {\n 10 => 'd\u00e9cimo',\n 20 => 'vig\u00e9simo',\n 30 => 'trig\u00e9simo',\n 40 => 'quadrag\u00e9simo',\n 50 => 'quinquag\u00e9simo',\n 60 => 'sexag\u00e9simo',\n 70 => 'septuag\u00e9simo',\n 80 => 'octog\u00e9simo',\n 90 => 'nonag\u00e9simo'},\n GENERO_FEM => {\n 10 => 'd\u00e9cima',\n 20 => 'vig\u00e9sima',\n 30 => 'trig\u00e9sima',\n 40 => 'quadrag\u00e9sima',\n 50 => 'quinquag\u00e9sima',\n 60 => 'sexag\u00e9sima',\n 70 => 'septuag\u00e9sima',\n 80 => 'octog\u00e9sima',\n 90 => 'nonag\u00e9sima'}}\n \n CENTENAS_ORDINAL = {\n GENERO_MASC => {\n 100 => 'cent\u00e9simo',\n 200 => 'ducent\u00e9simo',\n 300 => 'trecent\u00e9simo',\n 400 => 'quadringent\u00e9simo',\n 500 => 'quingent\u00e9simo',\n 600 => 'seiscent\u00e9simo',\n 700 => 'septingent\u00e9simo',\n 800 => 'octingent\u00e9simo',\n 900 => 'noningent\u00e9simo'},\n 
GENERO_FEM => {\n 100 => 'cent\u00e9sima',\n 200 => 'ducent\u00e9sima',\n 300 => 'trecent\u00e9sima',\n 400 => 'quadringent\u00e9sima',\n 500 => 'quingent\u00e9sima',\n 600 => 'seiscent\u00e9sima',\n 700 => 'septingent\u00e9sima',\n 800 => 'octingent\u00e9sima',\n 900 => 'noningent\u00e9sima'}}\n \n \n MILHAR_ORDINAL = {\n GENERO_MASC => {\n 1000 => 'mil\u00e9simo'},\n GENERO_FEM =>{\n 1000 => 'mil\u00e9sima'}}\n \n MESES = {\n 1 => 'janeiro',\n 2 => 'fevereiro',\n 3 => 'mar\u00e7o',\n 4 => 'abril',\n 5 => 'maio',\n 6 => 'junho',\n 7 => 'julho',\n 8 => 'agosto',\n 9 => 'setembro',\n 10 =>'outubro',\n 11 =>'novembro',\n 12 =>'dezembro'}\n\n def Extenso.is_int(s)\n Integer(s) != nil rescue false\n end\n \n #######################################################################################################################################\n \n def Extenso.numero (valor, genero = GENERO_MASC)\n\n # Gera a representa\u00e7\u00e3o por extenso de um n\u00famero inteiro, maior que zero e menor ou igual a VALOR_MAXIMO.\n #\n # PAR\u00c2METROS:\n # valor (Integer) O valor num\u00e9rico cujo extenso se deseja gerar\n #\n # genero (Integer) [Opcional; valor padr\u00e3o: GExtenso::GENERO_MASC] O g\u00eanero gramatical (GExtenso::GENERO_MASC ou GExtenso::GENERO_FEM)\n # do extenso a ser gerado. 
Isso possibilita distinguir, por exemplo, entre 'duzentos e dois homens' e 'duzentas e duas mulheres'.\n #\n # VALOR DE RETORNO:\n # (String) O n\u00famero por extenso incluindo ponto flutuante caso exista\n \n # ----- VALIDA\u00c7\u00c3O DOS PAR\u00c2METROS DE ENTRADA ---- \n if valor < 0\n raise \"[Exce\u00e7\u00e3o em GExtenso.numero] Par\u00e2metro 'valor' igual a ou menor que zero (recebido: '#{valor}')\"\n elsif valor > VALOR_MAXIMO\n raise '[Exce\u00e7\u00e3o em GExtenso::numero] Par\u00e2metro ''valor'' deve ser um inteiro entre 1 e ' + VALOR_MAXIMO.to_s + \" (recebido: '#{valor}')\"\n elsif genero != GENERO_MASC && genero != GENERO_FEM\n raise \"Exce\u00e7\u00e3o em GExtenso: valor incorreto para o par\u00e2metro 'genero' (recebido: '#{genero}')\"\n end\n \n if (valor - Integer(valor)) == 0\n return Extenso.numero_inteiro(Integer(valor))\n else\n flutuante = valor - Integer(valor)\n flutuante = flutuante.round(3)\n zeros = (1..4).detect{|x| 1.0*flutuante*(10**x) >= 1} - 1\n zeros_str = \"\"\n (1..zeros).each {|x| zeros_str << \"zero \"}\n casas = (1..4).detect{|x| 1.0*flutuante*(10**x)%10 == 0} - 1\n return Extenso.numero_inteiro(Integer(valor)) + \" ponto \" + zeros_str + Extenso.numero_inteiro((1.0*(10**casas)*flutuante).to_i)\n end\n end\n\n def Extenso.numero_inteiro(valor, genero = GENERO_MASC) \n if valor == 0\n return \"zero\"\n elsif valor >= 1 && valor <= 9\n UNIDADES[genero][valor]\n \n elsif valor == 10\n DEZENAS[valor]\n\n elsif valor >= 11 && valor <= 19\n DE11A19[valor]\n \n elsif valor >= 20 && valor <= 99\n dezena = valor - (valor % 10)\n ret = DEZENAS[dezena]\n # Chamada recursiva \u00e0 fun\u00e7\u00e3o para processar resto se este for maior que zero.\n # O conectivo 'e' \u00e9 utilizado entre dezenas e unidades.\n resto = valor - dezena\n if resto > 0\n ret += ' e ' + Extenso.numero(resto, genero)\n end\n ret\n\n elsif valor == 100 \n CENTENA_EXATA\n\n elsif valor >= 101 && valor <= 999\n centena = valor - (valor % 100)\n ret = 
CENTENAS[genero][centena] # As centenas (exceto 'cento') variam em g\u00eanero\n # Chamada recursiva \u00e0 fun\u00e7\u00e3o para processar resto se este for maior que zero.\n # O conectivo 'e' \u00e9 utilizado entre centenas e dezenas.\n resto = valor - centena \n if resto > 0\n ret += ' e ' + Extenso.numero(resto, genero)\n end\n ret\n\n elsif valor >= 1000 && valor <= 999999\n # A fun\u00e7\u00e3o 'floor' \u00e9 utilizada para encontrar o inteiro da divis\u00e3o de valor por 1000,\n # assim determinando a quantidade de milhares. O resultado \u00e9 enviado a uma chamada recursiva\n # da fun\u00e7\u00e3o. A palavra 'mil' n\u00e3o se flexiona.\n milhar = (valor / 1000).floor\n ret = Extenso.numero(milhar, GENERO_MASC) + ' ' + MILHAR # 'Mil' \u00e9 do g\u00eanero masculino\n resto = valor % 1000\n # Chamada recursiva \u00e0 fun\u00e7\u00e3o para processar resto se este for maior que zero.\n # O conectivo 'e' \u00e9 utilizado entre milhares e n\u00fameros entre 1 e 99, bem como antes de centenas exatas.\n if resto > 0 && ((resto >= 1 && resto <= 99) || resto % 100 == 0)\n ret += ' e ' + Extenso.numero(resto, genero)\n # Nos demais casos, ap\u00f3s o milhar \u00e9 utilizada a v\u00edrgula.\n elsif (resto > 0)\n ret += ', ' + Extenso.numero(resto, genero)\n end\n ret\n\n elsif valor >= 100000 && valor <= VALOR_MAXIMO\n # A fun\u00e7\u00e3o 'floor' \u00e9 utilizada para encontrar o inteiro da divis\u00e3o de valor por 1000000,\n # assim determinando a quantidade de milh\u00f5es. O resultado \u00e9 enviado a uma chamada recursiva\n # da fun\u00e7\u00e3o. A palavra 'milh\u00e3o' flexiona-se no plural.\n milhoes = (valor / 1000000).floor\n ret = Extenso.numero(milhoes, GENERO_MASC) + ' ' # Milh\u00e3o e milh\u00f5es s\u00e3o do g\u00eanero masculino\n \n # Se a o n\u00famero de milh\u00f5es for maior que 1, deve-se utilizar a forma flexionada no plural\n ret += milhoes == 1 ? 
MILHOES[NUM_SING] : MILHOES[NUM_PLURAL]\n\n resto = valor % 1000000\n\n # Chamada recursiva \u00e0 fun\u00e7\u00e3o para processar resto se este for maior que zero.\n # O conectivo 'e' \u00e9 utilizado entre milh\u00f5es e n\u00fameros entre 1 e 99, bem como antes de centenas exatas.\n if resto && ((resto >= 1 && resto <= 99) || resto % 100 == 0)\n ret += ' e ' + Extenso.numero(resto, genero)\n # Nos demais casos, ap\u00f3s o milh\u00e3o \u00e9 utilizada a v\u00edrgula.\n elsif resto > 0\n ret += ', ' + Extenso.numero(resto, genero)\n end\n ret\n\n end\n \n end\n \n #######################################################################################################################################\n \n def Extenso.moeda(\n valor,\n casas_decimais = 2,\n info_unidade = ['real', 'reais', GENERO_MASC],\n info_fracao = ['centavo', 'centavos', GENERO_MASC]\n ) \n \n # Gera a representa\u00e7\u00e3o por extenso de um valor monet\u00e1rio, maior que zero e menor ou igual a GExtenso::VALOR_MAXIMO.\n #\n #\n # PAR\u00c2METROS:\n # valor (Integer) O valor monet\u00e1rio cujo extenso se deseja gerar.\n # ATEN\u00c7\u00c3O: PARA EVITAR OS CONHECIDOS PROBLEMAS DE ARREDONDAMENTO COM N\u00daMEROS DE PONTO FLUTUANTE, O VALOR DEVE SER PASSADO\n # J\u00c1 DEVIDAMENTE MULTIPLICADO POR 10 ELEVADO A $casasDecimais (o que equivale, normalmente, a passar o valor com centavos\n # multiplicado por 100)\n #\n # casas_decimais (Integer) [Opcional; valor padr\u00e3o: 2] N\u00famero de casas decimais a serem consideradas como parte fracion\u00e1ria (centavos)\n #\n # info_unidade (Array) [Opcional; valor padr\u00e3o: ['real', 'reais', GExtenso::GENERO_MASC]] Fornece informa\u00e7\u00f5es sobre a moeda a ser\n # utilizada. 
O primeiro valor da matriz corresponde ao nome da moeda no singular, o segundo ao nome da moeda no plural e o terceiro\n # ao g\u00eanero gramatical do nome da moeda (GExtenso::GENERO_MASC ou GExtenso::GENERO_FEM)\n #\n # info_fracao (Array) [Opcional; valor padr\u00e3o: ['centavo', 'centavos', GExtenso::GENERO_MASC]] Prov\u00ea informa\u00e7\u00f5es sobre a parte fracion\u00e1ria\n # da moeda. O primeiro valor da matriz corresponde ao nome da parte fracion\u00e1ria no singular, o segundo ao nome da parte fracion\u00e1ria no plural\n # e o terceiro ao g\u00eanero gramatical da parte fracion\u00e1ria (GExtenso::GENERO_MASC ou GExtenso::GENERO_FEM)\n #\n # VALOR DE RETORNO:\n # (String) O valor monet\u00e1rio por extenso\n \n # ----- VALIDA\u00c7\u00c3O DOS PAR\u00c2METROS DE ENTRADA ----\n if ! Extenso.is_int(valor)\n raise \"[Exce\u00e7\u00e3o em GExtenso.moeda] Par\u00e2metro 'valor' n\u00e3o \u00e9 num\u00e9rico (recebido: '#{valor}')\"\n\n elsif valor < 0\n raise \"[Exce\u00e7\u00e3o em GExtenso.moeda] Par\u00e2metro valor igual a ou menor que zero (recebido: '#{valor}')\"\n\n elsif ! Extenso.is_int(casas_decimais) || casas_decimais < 0\n raise \"[Exce\u00e7\u00e3o em GExtenso.moeda] Par\u00e2metro 'casas_decimais' n\u00e3o \u00e9 num\u00e9rico ou \u00e9 menor que zero (recebido: '#{casas_decimais}')\"\n\n elsif info_unidade.class != Array || info_unidade.length < 3\n temp = info_unidade.class == Array ? '[' + info_unidade.join(', ') + ']' : \"'#{info_unidade}'\"\n raise \"[Exce\u00e7\u00e3o em GExtenso.moeda] Par\u00e2metro 'info_unidade' n\u00e3o \u00e9 uma matriz com 3 (tr\u00eas) elementos (recebido: #{temp})\"\n \n elsif info_unidade[POS_GENERO] != GENERO_MASC && info_unidade[POS_GENERO] != GENERO_FEM\n raise \"Exce\u00e7\u00e3o em GExtenso: valor incorreto para o par\u00e2metro 'info_unidade[POS_GENERO]' (recebido: '#{info_unidade[POS_GENERO]}')\"\n\n elsif info_fracao.class != Array || info_fracao.length < 3\n temp = info_fracao.class == Array ? 
'[' + info_fracao.join(', ') + ']' : \"'#{info_fracao}'\"\n raise \"[Exce\u00e7\u00e3o em GExtenso.moeda] Par\u00e2metro 'info_fracao' n\u00e3o \u00e9 uma matriz com 3 (tr\u00eas) elementos (recebido: #{temp})\"\n \n elsif info_fracao[POS_GENERO] != GENERO_MASC && info_fracao[POS_GENERO] != GENERO_FEM\n raise \"[Exce\u00e7\u00e3o em GExtenso.moeda] valor incorreto para o par\u00e2metro 'info_fracao[POS_GENERO]' (recebido: '#{info_fracao[POS_GENERO]}').\"\n elsif valor == 0\n return \"zero reais\"\n end\n\n # -----------------------------------------------\n\n ret = ''\n\n # A parte inteira do valor monet\u00e1rio corresponde ao valor passado dividido por 10 elevado a casas_decimais, desprezado o resto.\n # Assim, com o padr\u00e3o de 2 casas_decimais, o valor ser\u00e1 dividido por 100 (10^2), e o resto \u00e9 descartado utilizando-se floor().\n parte_inteira = valor.floor / (10**casas_decimais)\n\n # A parte fracion\u00e1ria ('centavos'), por seu turno, corresponder\u00e1 ao resto da divis\u00e3o do valor por 10 elevado a casas_decimais.\n # No cen\u00e1rio comum em que trabalhamos com 2 casas_decimais, ser\u00e1 o resto da divis\u00e3o do valor por 100 (10^2).\n fracao = valor % (10**casas_decimais)\n\n # O extenso para a parte_inteira somente ser\u00e1 gerado se esta for maior que zero. Para tanto, utilizamos\n # os pr\u00e9stimos do m\u00e9todo GExtenso::numero().\n if parte_inteira > 0\n ret = Extenso.numero(parte_inteira, info_unidade[POS_GENERO]) + ' '\n ret += parte_inteira == 1 ? info_unidade[NUM_SING] : info_unidade[NUM_PLURAL]\n end\n\n # De forma semelhante, o extenso da fracao somente ser\u00e1 gerado se esta for maior que zero. */\n if fracao > 0\n # Se a parte_inteira for maior que zero, o extenso para ela j\u00e1 ter\u00e1 sido gerado. Antes de juntar os\n # centavos, precisamos colocar o conectivo 'e'.\n if parte_inteira > 0\n ret += ' e '\n end\n ret += Extenso.numero(fracao, info_fracao[POS_GENERO]) + ' '\n ret += parte_inteira == 1 ? 
info_fracao[NUM_SING] : info_fracao[NUM_PLURAL]\n end\n\n ret\n\n end\n######################################################################################################################################################\n def Extenso.data(data)\n # Escreve uma data por extenso\n # A variavel data \u00e9 um objeto da classe Date\n # Uso esta classe porque ela d\u00e1 suporte \u00e0 opera\u00e7\u00f5es elementares com datas e mant\u00e9m a consist\u00eancia nos sistemas que estou programando\n # VALOR DE RETORNO: \n # (String) A data escrita por extenso\n # Exemplo: GExtenso.data(Date.parse \"23/12/1983\") ==> \"vinte e tr\u00eas de dezembro de mil novecentos e oitenta e tr\u00eas\"\n %Q{#{Extenso.numero(data.day)} de #{MESES[data.month]} de #{Extenso.numero(data.year)}}\n end\n\n######################################################################################################################################################\n def Extenso.ordinal (valor, genero = GENERO_MASC)\n\n # Gera a representa\u00e7\u00e3o ordinal de um n\u00famero inteiro de 1 \u00e0 1000\n\n # PAR\u00c2METROS:\n # valor (Integer) O valor num\u00e9rico cujo extenso se deseja gerar\n #\n # genero (Integer) [Opcional; valor padr\u00e3o: GExtenso::GENERO_MASC] O g\u00eanero gramatical (GExtenso::GENERO_MASC ou GExtenso::GENERO_FEM)\n # do extenso a ser gerado. 
Isso possibilita distinguir, por exemplo, entre 'duzentos e dois homens' e 'duzentas e duas mulheres'.\n #\n # VALOR DE RETORNO:\n # (String) O n\u00famero por extenso\n \n # ----- VALIDA\u00c7\u00c3O DOS PAR\u00c2METROS DE ENTRADA ---- \n \n if !is_int(valor)\n raise \"[Exce\u00e7\u00e3o em GExtenso.numero] Par\u00e2metro 'valor' n\u00e3o \u00e9 num\u00e9rico (recebido: '#{valor}')\"\n elsif valor <= 0\n raise \"[Exce\u00e7\u00e3o em GExtenso.numero] Par\u00e2metro 'valor' igual a ou menor que zero (recebido: '#{valor}')\"\n elsif valor > VALOR_MAXIMO\n raise '[Exce\u00e7\u00e3o em GExtenso::numero] Par\u00e2metro ''valor'' deve ser um inteiro entre 1 e ' + VALOR_MAXIMO.to_s + \" (recebido: '#{valor}')\"\n elsif genero != GENERO_MASC && genero != GENERO_FEM\n raise \"Exce\u00e7\u00e3o em GExtenso: valor incorreto para o par\u00e2metro 'genero' (recebido: '#{genero}')\"\n # ------------------------------------------------\n elsif valor >= 1 && valor <= 9\n return UNIDADES_ORDINAL[genero][valor]\n elsif valor >= 10 && valor <= 99\n dezena = valor - (valor % 10)\n resto = valor - dezena\n ret = DEZENAS_ORDINAL[genero][dezena]+\" \"\n if resto > 0 then ret+= Extenso.ordinal(resto,genero); end\n return ret\n elsif valor >= 100 && valor <= 999\n centena = valor - (valor % 100)\n resto = valor - centena \n ret = CENTENAS_ORDINAL[genero][centena]+\" \"\n if resto > 0 then ret += Extenso.ordinal(resto, genero); end\n return ret\n elsif valor == 1000\n return MILHAR_ORDINAL[genero][valor]+\" \"\n end\n end\n\n def Extenso.moeda_real(numero)\n unit = \"R$\"\n separator = \",\"\n delimiter = \".\"\n mystring = sprintf(\"%s %.2f\",unit, numero)\n mystring = mystring.gsub(\".\",separator)\n pos = mystring.match(separator).begin(0) - 3\n while !(/[0-9]/.match(mystring[pos-1])== nil) do\n mystring.insert(pos,delimiter)\n pos-=3\n end \n return mystring\n end\n\n def Extenso.to_cash(numero)\n unit = \"R$\"\n separator = \",\"\n delimiter = \".\"\n mystring = sprintf(\"%s 
%.2f\",unit, numero)\n mystring = mystring.gsub(\".\",separator)\n pos = mystring.match(separator).begin(0) - 3\n while !(/[0-9]/.match(mystring[pos-1])== nil) do\n mystring.insert(pos,delimiter)\n pos-=3\n end \n return mystring << \" (\" << Extenso.moeda((numero*100).to_i) << \")\"\n end \n \nend "}}
-{"repo": "pegjs/website", "pr_number": 10, "title": "Small website improvements", "state": "closed", "merged_at": null, "additions": 4, "deletions": 34, "files_changed": ["public/css/content.css", "public/js/online.js"], "files_before": {"public/css/content.css": "#content h1 { margin: 0 0 .75em 0; font-size: 200%; }\n#content h2 {\n margin: 1.5em 0 .75em 0; border-bottom: 2pt dotted silver;\n font-size: 150%;\n}\n#content h3 { margin: 1.5em 0 .5em 0; font-size: 125%; }\n#content li { margin: .5em 0; }\n#content dt { font-weight: bold; }\n#content dd { margin-top: 1em; margin-bottom: 1em; }\n#content aside.info { margin: 1em 0 1em 2em; color: gray; }\n#content pre {\n overflow: auto;\n padding: .5em 1em; border-left: 5px solid silver;\n background-color: #f0f0f0;\n}\n#content table { border-spacing: 0; }\n#content a, #content a:visited { color: #3d586c; }\n\n#content .center { text-align: center; }\n\n/* Home */\n\n#content #sidebar {\n float: right; width: 17em;\n font-size: 80%; text-align: center;\n}\n#content #sidebar a {\n font-weight: bold; text-decoration: none;\n color: #006000;\n}\n#content #sidebar a:hover { text-decoration: underline; }\n#content #sidebar a.try {\n display: block;\n padding: .75em; border-radius: .6em; -moz-border-radius: .6em;\n font-size: 140%;\n color: #e0ffe0; background-color: #499149;\n}\n#content #sidebar a.try:hover {\n text-decoration: none; background-color: #006000;\n}\n#content #sidebar .npm {\n padding: .75em; border: 1px solid #499149; border-radius: .7em; -moz-border-radius: .7em;\n font-family: \"Lucida Console\", fixed, monospace; font-size: 120%;\n color: #2c572c; background-color: #e0ffe0;\n}\n#content #sidebar .label { margin-left: 2.6em; text-align: left; color: #606060; }\n#content #sidebar #download {\n list-style: square;\n padding-left: 2em;\n color: gray;\n text-align: left;\n font-size: 120%;\n}\n#content #sidebar a.twitter { display:block; margin-top: 5em; }\n#content #sidebar .separator { color: gray; 
margin: 1.5em 0 1em 0; }\n\n#content #left-column { margin-right: 17em; }\n\n/* Online Version */\n\n#content .message {\n border-radius: .5em; -moz-border-radius: .5em; padding: .5em 1em;\n}\n#content .message.info { background-color: #c0ffc0; }\n#content .message.info a.download { display: block; float: right; }\n#content .message.info .size-and-time { visibility: hidden; float: right; font-size: 70%; margin: .3em 0; color: #80c080; }\n#content .message.info:hover .size-and-time { visibility: visible; }\n#content .message.error { background-color: orange; }\n#content .message.progress {\n padding-left: 40px;\n /* Spinner image generated by http://www.loadinfo.net/. */\n background: #ffff80 url(\"../img/spinner-16x16-progress.gif\") 14px center no-repeat;\n}\n#content .message.disabled { color: gray; background-color: #f0f0f0; }\n\n#content table.form { width: 100%; }\n#content table.form td, table.form th { padding: .5em 1em; }\n#content table.form td:first-child, table.form th:first-child { padding-left: 0; }\n#content table.form td:last-child, table.form th:last-child { padding-right: 0; }\n#content table.form th { text-align: left; font-weight: normal; }\n\n#content h2.suggestion { border: none; }\n#content h2.suggestion.top { margin-top: 0; }\n#content h2.suggestion .step-number {\n display: block; float: left;\n width: 1.5em;\n border-radius: .4em; -moz-border-radius: .4em;\n text-align: center;\n color: white; background-color: black;\n}\n#content h2.suggestion .step-title { margin-left: 2.5em; }\n\n#content textarea.code { width: 100%; height: 20em; font-family: \"Lucida Console\", fixed, monospace; }\n\n#content .textarea-wrapper { padding-right: 6px; }\n\n#content .tooltip { position: absolute; display: none; }\n#content .tooltip .content {\n padding: .5em 1em;\n box-shadow: .25em .25em .5em rgba(0, 0, 0, 0.25); -webkit-box-shadow: .25em .25em .5em rgba(0, 0, 0, 0.25); -moz-box-shadow: .25em .25em .5em rgba(0, 0, 0, 0.25);\n border-radius: .5em; 
-moz-border-radius: .5em;\n padding: .5em 1em;\n color: white; background-color: black;\n font-size: 80%;\n}\n#content .tooltip .arrow {\n height: 6px;\n background: url(\"../img/tooltip-arrow.png\") top center no-repeat;\n}\n\n#content #columns { width: 100%; height: 100%; border-spacing: 1em; }\n#content #columns td { width: 50%; }\n\n#content table.column { width: 100%; height: 100%; }\n#content table.column td { vertical-align: top; }\n/* Browsers will enlarge the |.content-height| cells to fit the contents. */\n#content table.column td.content-height { height: 1px; }\n\n#content #output-header {\n margin: 1.25em 0 0 0; border: none; padding: .25em 1.2em .25em 1.2em;\n font-size: 80%;\n color: white; background-color: silver;\n}\n#content #output {\n overflow: auto;\n max-height: 20em;\n margin: 0; padding: .5em 1em; border: 2px solid silver; border-top: none;\n background-color: #f0f0f0;\n}\n#content #output.disabled { color: gray; }\n\n#content #settings { padding: .5em 0; }\n#content #settings label { padding-right: 1em; }\n#content #settings label[for=option-optimize] { padding-left: 2em; }\n#content #parser-var { width: 15em; }\n#content #options { padding-top: 1em; }\n#content #parser-download {\n float: right;\n width: 9em;\n margin-top: 2em;\n padding: .5em; border-radius: .4em; -moz-border-radius: .4em;\n text-align: center; text-decoration: none;\n color: #e0ffe0; background-color: #499149;\n}\n#content #parser-download:hover { background-color: #006000; }\n#content #parser-download.disabled { color: #e0e0e0; background-color: gray; }\n", "public/js/online.js": "$(document).ready(function() {\n var KB = 1024;\n var MS_IN_S = 1000;\n\n var parser;\n\n var buildAndParseTimer = null;\n var parseTimer = null;\n\n var oldGrammar = null;\n var oldParserVar = null;\n var oldOptionCache = null;\n var oldOptionOptimize = null;\n var oldInput = null;\n\n function buildSizeAndTimeInfoHtml(title, size, time) {\n return $(\"\", {\n \"class\": \"size-and-time\",\n 
title: title,\n html: (size / KB).toPrecision(2) + \" kB, \"\n + time + \" ms, \"\n + ((size / KB) / (time / MS_IN_S)).toPrecision(2) + \" kB/s\"\n });\n }\n\n function buildErrorMessage(e) {\n return e.line !== undefined && e.column !== undefined\n ? \"Line \" + e.line + \", column \" + e.column + \": \" + e.message\n : e.message;\n }\n\n function build() {\n oldGrammar = $(\"#grammar\").val();\n oldParserVar = $(\"#parser-var\").val();\n oldOptionCache = $(\"#option-cache\").is(\":checked\");\n oldOptionOptimize = $(\"#option-optimize\").val();\n\n $('#build-message').attr(\"class\", \"message progress\").text(\"Building the parser...\");\n $(\"#input\").attr(\"disabled\", \"disabled\");\n $(\"#parse-message\").attr(\"class\", \"message disabled\").text(\"Parser not available.\");\n $(\"#output\").addClass(\"disabled\").text(\"Output not available.\");\n $(\"#parser-var\").attr(\"disabled\", \"disabled\");\n $(\"#option-cache\").attr(\"disabled\", \"disabled\");\n $(\"#option-optimize\").attr(\"disabled\", \"disabled\");\n $(\"#parser-download\").addClass(\"disabled\");\n\n try {\n var timeBefore = (new Date).getTime();\n var parserSource = PEG.buildParser($(\"#grammar\").val(), {\n cache: $(\"#option-cache\").is(\":checked\"),\n optimize: $(\"#option-optimize\").val(),\n output: \"source\"\n });\n var timeAfter = (new Date).getTime();\n\n parser = eval(parserSource);\n\n $(\"#build-message\")\n .attr(\"class\", \"message info\")\n .html(\"Parser built successfully.\")\n .append(buildSizeAndTimeInfoHtml(\n \"Parser build time and speed\",\n $(\"#grammar\").val().length,\n timeAfter - timeBefore\n ));\n var parserUrl = \"data:text/plain;charset=utf-8;base64,\"\n + Base64.encode($(\"#parser-var\").val() + \" = \" + parserSource + \";\\n\");\n $(\"#input\").removeAttr(\"disabled\");\n $(\"#parser-var\").removeAttr(\"disabled\");\n $(\"#option-cache\").removeAttr(\"disabled\");\n $(\"#option-optimize\").removeAttr(\"disabled\");\n 
$(\"#parser-download\").removeClass(\"disabled\").attr(\"href\", parserUrl);\n\n var result = true;\n } catch (e) {\n $(\"#build-message\").attr(\"class\", \"message error\").text(buildErrorMessage(e));\n var parserUrl = \"data:text/plain;charset=utf-8;base64,\"\n + Base64.encode(\"Parser not available.\");\n $(\"#parser-download\").attr(\"href\", parserUrl);\n\n var result = false;\n }\n\n doLayout();\n return result;\n }\n\n function parse() {\n oldInput = $(\"#input\").val();\n\n $(\"#input\").removeAttr(\"disabled\");\n $(\"#parse-message\").attr(\"class\", \"message progress\").text(\"Parsing the input...\");\n $(\"#output\").addClass(\"disabled\").text(\"Output not available.\");\n\n try {\n var timeBefore = (new Date).getTime();\n var output = parser.parse($(\"#input\").val());\n var timeAfter = (new Date).getTime();\n\n $(\"#parse-message\")\n .attr(\"class\", \"message info\")\n .text(\"Input parsed successfully.\")\n .append(buildSizeAndTimeInfoHtml(\n \"Parsing time and speed\",\n $(\"#input\").val().length,\n timeAfter - timeBefore\n ));\n $(\"#output\").removeClass(\"disabled\").text(jsDump.parse(output));\n\n var result = true;\n } catch (e) {\n $(\"#parse-message\").attr(\"class\", \"message error\").text(buildErrorMessage(e));\n\n var result = false;\n }\n\n doLayout();\n return result;\n }\n\n function buildAndParse() {\n build() && parse();\n }\n\n function scheduleBuildAndParse() {\n var nothingChanged = $(\"#grammar\").val() === oldGrammar\n && $(\"#parser-var\").val() === oldParserVar\n && $(\"#option-cache\").is(\":checked\") === oldOptionCache\n && $(\"#option-optimize\").val() === oldOptionOptimize;\n if (nothingChanged) { return; }\n\n if (buildAndParseTimer !== null) {\n clearTimeout(buildAndParseTimer);\n buildAndParseTimer = null;\n }\n if (parseTimer !== null) {\n clearTimeout(parseTimer);\n parseTimer = null;\n }\n\n buildAndParseTimer = setTimeout(function() {\n buildAndParse();\n buildAndParseTimer = null;\n }, 500);\n }\n\n function 
scheduleParse() {\n if ($(\"#input\").val() === oldInput) { return; }\n if (buildAndParseTimer !== null) { return; }\n\n if (parseTimer !== null) {\n clearTimeout(parseTimer);\n parseTimer = null;\n }\n\n parseTimer = setTimeout(function() {\n parse();\n parseTimer = null;\n }, 500);\n }\n\n function doLayout() {\n /*\n * This forces layout of the page so that the |#columns| table gets a chance\n * make itself smaller when the browser window shrinks.\n */\n if ($.browser.msie || $.browser.opera) {\n $(\"#left-column\").height(\"0px\");\n $(\"#right-column\").height(\"0px\");\n }\n $(\"#grammar\").height(\"0px\");\n $(\"#input\").height(\"0px\");\n\n if ($.browser.msie || $.browser.opera) {\n $(\"#left-column\").height(($(\"#left-column\").parent().innerHeight() - 2) + \"px\");\n $(\"#right-column\").height(($(\"#right-column\").parent().innerHeight() - 2) + \"px\");\n }\n\n $(\"#grammar\").height(($(\"#grammar\").parent().parent().innerHeight() - 14) + \"px\");\n $(\"#input\").height(($(\"#input\").parent().parent().innerHeight() - 14) + \"px\");\n }\n\n $(\"#grammar, #parser-var, #option-cache, #option-optimize\")\n .change(scheduleBuildAndParse)\n .mousedown(scheduleBuildAndParse)\n .mouseup(scheduleBuildAndParse)\n .click(scheduleBuildAndParse)\n .keydown(scheduleBuildAndParse)\n .keyup(scheduleBuildAndParse)\n .keypress(scheduleBuildAndParse);\n\n $(\"#input\")\n .change(scheduleParse)\n .mousedown(scheduleParse)\n .mouseup(scheduleParse)\n .click(scheduleParse)\n .keydown(scheduleParse)\n .keyup(scheduleParse)\n .keypress(scheduleParse);\n\n doLayout();\n $(window).resize(doLayout);\n\n $(\"#loader\").hide();\n $(\"#content\").show();\n\n $(\"#grammar, #parser-var, #option-cache, #option-optimize\").removeAttr(\"disabled\");\n\n $(\"#grammar, #input\").focus(function() {\n var textarea = $(this);\n\n setTimeout(function() {\n textarea.unbind(\"focus\");\n\n var tooltip = textarea.next();\n var position = textarea.position();\n\n tooltip.css({\n top: 
(position.top - tooltip.outerHeight() - 5) + \"px\",\n left: (position.left + textarea.outerWidth() - tooltip.outerWidth()) + \"px\"\n }).fadeTo(400, 0.8).delay(3000).fadeOut();\n }, 1000);\n });\n\n $(\"#grammar\").focus();\n\n buildAndParse();\n});\n"}, "files_after": {"public/css/content.css": "#content h1 { margin: 0 0 .75em 0; font-size: 200%; }\n#content h2 {\n margin: 1.5em 0 .75em 0; border-bottom: 2pt dotted silver;\n font-size: 150%;\n}\n#content h3 { margin: 1.5em 0 .5em 0; font-size: 125%; }\n#content li { margin: .5em 0; }\n#content dt { font-weight: bold; }\n#content dd { margin-top: 1em; margin-bottom: 1em; }\n#content aside.info { margin: 1em 0 1em 2em; color: gray; }\n#content pre {\n overflow: auto;\n padding: .5em 1em; border-left: 5px solid silver;\n background-color: #f0f0f0;\n}\n#content table { border-spacing: 0; }\n#content a, #content a:visited { color: #3d586c; }\n\n#content .center { text-align: center; }\n\n/* Home */\n\n#content #sidebar {\n float: right; width: 17em;\n font-size: 80%; text-align: center;\n}\n#content #sidebar a {\n font-weight: bold; text-decoration: none;\n color: #006000;\n}\n#content #sidebar a:hover { text-decoration: underline; }\n#content #sidebar a.try {\n display: block;\n padding: .75em; border-radius: .6em; -moz-border-radius: .6em;\n font-size: 140%;\n color: #e0ffe0; background-color: #499149;\n}\n#content #sidebar a.try:hover {\n text-decoration: none; background-color: #006000;\n}\n#content #sidebar .npm {\n padding: .75em; border: 1px solid #499149; border-radius: .7em; -moz-border-radius: .7em;\n font-family: \"Lucida Console\", fixed, monospace; font-size: 120%;\n color: #2c572c; background-color: #e0ffe0;\n}\n#content #sidebar .label { margin-left: 2.6em; text-align: left; color: #606060; }\n#content #sidebar #download {\n list-style: square;\n padding-left: 2em;\n color: gray;\n text-align: left;\n font-size: 120%;\n}\n#content #sidebar a.twitter { display:block; margin-top: 5em; }\n#content #sidebar 
.separator { color: gray; margin: 1.5em 0 1em 0; }\n\n#content #left-column { margin-right: 17em; }\n\n/* Online Version */\n\n#content .message {\n border-radius: .5em; -moz-border-radius: .5em; padding: .5em 1em;\n}\n#content .message.info { background-color: #c0ffc0; }\n#content .message.info a.download { display: block; float: right; }\n#content .message.info .size-and-time { visibility: hidden; float: right; font-size: 70%; margin: .3em 0; color: #80c080; }\n#content .message.info:hover .size-and-time { visibility: visible; }\n#content .message.error { background-color: orange; }\n#content .message.progress {\n padding-left: 40px;\n /* Spinner image generated by http://www.loadinfo.net/. */\n background: #ffff80 url(\"../img/spinner-16x16-progress.gif\") 14px center no-repeat;\n}\n#content .message.disabled { color: gray; background-color: #f0f0f0; }\n\n#content table.form { width: 100%; }\n#content table.form td, table.form th { padding: .5em 1em; }\n#content table.form td:first-child, table.form th:first-child { padding-left: 0; }\n#content table.form td:last-child, table.form th:last-child { padding-right: 0; }\n#content table.form th { text-align: left; font-weight: normal; }\n\n#content h2.suggestion { border: none; }\n#content h2.suggestion.top { margin-top: 0; }\n#content h2.suggestion .step-number {\n display: block; float: left;\n width: 1.5em;\n border-radius: .4em; -moz-border-radius: .4em;\n text-align: center;\n color: white; background-color: black;\n}\n#content h2.suggestion .step-title { margin-left: 2.5em; }\n\n#content textarea.code { width: 100%; height: 20em; font-family: \"Lucida Console\", fixed, monospace; }\n\n#content .textarea-wrapper { padding-right: 6px; }\n\n#content #columns { width: 100%; height: 100%; border-spacing: 1em; }\n#content #columns td { width: 50%; }\n\n#content table.column { width: 100%; height: 100%; }\n#content table.column td { vertical-align: top; }\n/* Browsers will enlarge the |.content-height| cells to fit 
the contents. */\n#content table.column td.content-height { height: 1px; }\n\n#content #output-header {\n margin: 1.25em 0 0 0; border: none; padding: .25em 1.2em .25em 1.2em;\n font-size: 80%;\n color: white; background-color: silver;\n}\n#content #output {\n overflow: auto;\n max-height: 20em;\n margin: 0; padding: .5em 1em; border: 2px solid silver; border-top: none;\n background-color: #f0f0f0;\n}\n#content #output.disabled { color: gray; }\n\n#content #settings { padding: .5em 0; }\n#content #settings label { padding-right: 1em; }\n#content #parser-var { width: 15em; }\n#content #options { padding-top: 1em; }\n#content #parser-download {\n float: right;\n width: 9em;\n margin-top: 2em;\n padding: .5em; border-radius: .4em; -moz-border-radius: .4em;\n text-align: center; text-decoration: none;\n color: #e0ffe0; background-color: #499149;\n}\n#content #parser-download:hover { background-color: #006000; }\n#content #parser-download.disabled { color: #e0e0e0; background-color: gray; }\n", "public/js/online.js": "$(document).ready(function() {\n var KB = 1024;\n var MS_IN_S = 1000;\n\n var parser;\n\n var buildAndParseTimer = null;\n var parseTimer = null;\n\n var oldGrammar = null;\n var oldParserVar = null;\n var oldOptionCache = null;\n var oldOptionTrackLineAndColumn = null;\n var oldInput = null;\n\n function buildSizeAndTimeInfoHtml(title, size, time) {\n return $(\"\", {\n \"class\": \"size-and-time\",\n title: title,\n html: (size / KB).toPrecision(2) + \" kB, \"\n + time + \" ms, \"\n + ((size / KB) / (time / MS_IN_S)).toPrecision(2) + \" kB/s\"\n });\n }\n\n function buildErrorMessage(e) {\n return e.line !== undefined && e.column !== undefined\n ? 
\"Line \" + e.line + \", column \" + e.column + \": \" + e.message\n : e.message;\n }\n\n function build() {\n oldGrammar = $(\"#grammar\").val();\n oldParserVar = $(\"#parser-var\").val();\n oldOptionCache = $(\"#option-cache\").is(\":checked\"),\n oldOptionTrackLineAndColumn = $(\"#option-track-line-and-column\").is(\":checked\")\n\n $('#build-message').attr(\"class\", \"message progress\").text(\"Building the parser...\");\n $(\"#input\").attr(\"disabled\", \"disabled\");\n $(\"#parse-message\").attr(\"class\", \"message disabled\").text(\"Parser not available.\");\n $(\"#output\").addClass(\"disabled\").text(\"Output not available.\");\n $(\"#parser-var\").attr(\"disabled\", \"disabled\");\n $(\"#option-cache\").attr(\"disabled\", \"disabled\");\n $(\"#option-track-line-and-column\").attr(\"disabled\", \"disabled\");\n $(\"#parser-download\").addClass(\"disabled\");\n\n try {\n var timeBefore = (new Date).getTime();\n parser = PEG.buildParser($(\"#grammar\").val(), {\n cache: $(\"#option-cache\").is(\":checked\"),\n trackLineAndColumn: $(\"#option-track-line-and-column\").is(\":checked\")\n });\n var timeAfter = (new Date).getTime();\n\n $(\"#build-message\")\n .attr(\"class\", \"message info\")\n .html(\"Parser built successfully.\")\n .append(buildSizeAndTimeInfoHtml(\n \"Parser build time and speed\",\n $(\"#grammar\").val().length,\n timeAfter - timeBefore\n ));\n var parserUrl = \"data:text/plain;charset=utf-8;base64,\"\n + Base64.encode($(\"#parser-var\").val() + \" = \" + parser.toSource() + \";\\n\");\n $(\"#input\").removeAttr(\"disabled\");\n $(\"#parser-var\").removeAttr(\"disabled\");\n $(\"#option-cache\").removeAttr(\"disabled\");\n $(\"#option-track-line-and-column\").removeAttr(\"disabled\");\n $(\"#parser-download\").removeClass(\"disabled\").attr(\"href\", parserUrl);\n\n var result = true;\n } catch (e) {\n $(\"#build-message\").attr(\"class\", \"message error\").text(buildErrorMessage(e));\n var parserUrl = 
\"data:text/plain;charset=utf-8;base64,\"\n + Base64.encode(\"Parser not available.\");\n $(\"#parser-download\").attr(\"href\", parserUrl);\n\n var result = false;\n }\n\n doLayout();\n return result;\n }\n\n function parse() {\n oldInput = $(\"#input\").val();\n\n $(\"#input\").removeAttr(\"disabled\");\n $(\"#parse-message\").attr(\"class\", \"message progress\").text(\"Parsing the input...\");\n $(\"#output\").addClass(\"disabled\").text(\"Output not available.\");\n\n try {\n var timeBefore = (new Date).getTime();\n var output = parser.parse($(\"#input\").val());\n var timeAfter = (new Date).getTime();\n\n $(\"#parse-message\")\n .attr(\"class\", \"message info\")\n .text(\"Input parsed successfully.\")\n .append(buildSizeAndTimeInfoHtml(\n \"Parsing time and speed\",\n $(\"#input\").val().length,\n timeAfter - timeBefore\n ));\n $(\"#output\").removeClass(\"disabled\").text(jsDump.parse(output));\n\n var result = true;\n } catch (e) {\n $(\"#parse-message\").attr(\"class\", \"message error\").text(buildErrorMessage(e));\n\n var result = false;\n }\n\n doLayout();\n return result;\n }\n\n function buildAndParse() {\n build() && parse();\n }\n\n function scheduleBuildAndParse() {\n var nothingChanged = $(\"#grammar\").val() === oldGrammar\n && $(\"#parser-var\").val() === oldParserVar\n && $(\"#option-cache\").is(\":checked\") === oldOptionCache\n && $(\"#option-track-line-and-column\").is(\":checked\") === oldOptionTrackLineAndColumn;\n if (nothingChanged) { return; }\n\n if (buildAndParseTimer !== null) {\n clearTimeout(buildAndParseTimer);\n buildAndParseTimer = null;\n }\n if (parseTimer !== null) {\n clearTimeout(parseTimer);\n parseTimer = null;\n }\n\n buildAndParseTimer = setTimeout(function() {\n buildAndParse();\n buildAndParseTimer = null;\n }, 500);\n }\n\n function scheduleParse() {\n if ($(\"#input\").val() === oldInput) { return; }\n if (buildAndParseTimer !== null) { return; }\n\n if (parseTimer !== null) {\n clearTimeout(parseTimer);\n 
parseTimer = null;\n }\n\n parseTimer = setTimeout(function() {\n parse();\n parseTimer = null;\n }, 500);\n }\n\n function doLayout() {\n /*\n * This forces layout of the page so that the |#columns| table gets a chance\n * make itself smaller when the browser window shrinks.\n */\n if ($.browser.msie || $.browser.opera) {\n $(\"#left-column\").height(\"0px\");\n $(\"#right-column\").height(\"0px\");\n }\n $(\"#grammar\").height(\"0px\");\n $(\"#input\").height(\"0px\");\n\n if ($.browser.msie || $.browser.opera) {\n $(\"#left-column\").height(($(\"#left-column\").parent().innerHeight() - 2) + \"px\");\n $(\"#right-column\").height(($(\"#right-column\").parent().innerHeight() - 2) + \"px\");\n }\n\n $(\"#grammar\").height(($(\"#grammar\").parent().parent().innerHeight() - 14) + \"px\");\n $(\"#input\").height(($(\"#input\").parent().parent().innerHeight() - 14) + \"px\");\n }\n\n $(\"#grammar, #parser-var, #option-cache, #option-track-line-and-column\")\n .change(scheduleBuildAndParse)\n .mousedown(scheduleBuildAndParse)\n .mouseup(scheduleBuildAndParse)\n .click(scheduleBuildAndParse)\n .keydown(scheduleBuildAndParse)\n .keyup(scheduleBuildAndParse)\n .keypress(scheduleBuildAndParse);\n\n $(\"#input\")\n .change(scheduleParse)\n .mousedown(scheduleParse)\n .mouseup(scheduleParse)\n .click(scheduleParse)\n .keydown(scheduleParse)\n .keyup(scheduleParse)\n .keypress(scheduleParse);\n\n doLayout();\n $(window).resize(doLayout);\n\n $(\"#loader\").hide();\n $(\"#content\").show();\n\n $(\"#grammar, #parser-var, #option-cache, #option-track-line-and-column\").removeAttr(\"disabled\");\n\n $(\"#grammar\").focus();\n\n buildAndParse();\n});\n"}}
-{"repo": "billdueber/library_stdnums", "pr_number": 2, "title": "Add check for the prefix of an ISBN 13", "state": "closed", "merged_at": "2017-05-04T14:59:44Z", "additions": 14, "deletions": 2, "files_changed": ["lib/library_stdnums.rb", "spec/library_stdnums_spec.rb"], "files_before": {"lib/library_stdnums.rb": "# Static Module functions to work with library \"standard numbers\" ISSN, ISBN, and LCCN\nmodule StdNum\n\n # Helper methods common to ISBN/ISSN\n module Helpers\n\n # The pattern we use to try and find an ISBN/ISSN. Ditch everthing before the first\n # digit, then take all the digits/hyphens, optionally followed by an 'X'\n # Since the shortest possible string is 7 digits followed by a checksum digit\n # for an ISSN, we'll make sure they're at least that long. Still imperfect\n # (would fine \"5------\", for example) but should work in most cases.\n STDNUMPAT = /^.*?(\\d[\\d\\-]{6,}[xX]?)/\n\n # Extract the most likely looking number from the string. This will be the first\n # string of digits-and-hyphens-and-maybe-a-trailing-X, with the hypens removed\n # @param [String] str The string from which to extract an ISBN/ISSN\n # @return [String] The extracted identifier\n def extractNumber str\n match = STDNUMPAT.match str\n return nil unless match\n return (match[1].gsub(/\\-/, '')).upcase\n end\n\n # Same as STDNUMPAT but allowing for all numbers in the provided string\n STDNUMPAT_MULTIPLE = /.*?(\\d[\\d\\-]{6,}[xX]?)/\n\n # Extract the most likely looking numbers from the string. 
This will be each\n # string with digits-and-hyphens-and-maybe-a-trailing-X, with the hypens removed\n # @param [String] str The string from which to extract the ISBN/ISSNs\n # @return [Array] An array of extracted identifiers\n def extract_multiple_numbers(str)\n return [] if str == '' || str.nil?\n str.scan(STDNUMPAT_MULTIPLE).flatten.map{ |i| i.gsub(/\\-/, '').upcase }\n end\n\n # Given any string, extract what looks like the most likely ISBN/ISSN\n # of the given size(s), or nil if nothing matches at the correct size.\n # @param [String] rawnum The raw string containing (hopefully) an ISSN/ISBN\n # @param [Integer, Array, nil] valid_sizes An integer or array of integers of valid sizes\n # for this type (e.g., 10 or 13 for ISBN, 8 for ISSN)\n # @return [String,nil] the reduced and verified number, or nil if there's no match at the right size\n def reduce_to_basics rawnum, valid_sizes = nil\n return nil if rawnum.nil?\n\n num = extractNumber rawnum\n\n # Does it even look like a number?\n return nil unless num\n\n # Return what we've got if we don't care about the size\n return num unless valid_sizes\n\n # Check for valid size(s)\n [valid_sizes].flatten.each do |s|\n return num if num.size == s\n end\n\n # Didn't check out size-wise. Return nil\n return nil\n end\n end\n\n # Validate, convert, and normalize ISBNs (10-digit or 13-digit)\n module ISBN\n extend Helpers\n\n # Does it even look like an ISBN?\n def self.at_least_trying? isbn\n reduce_to_basics(isbn, [10,13]) ? true : false\n end\n\n\n # Compute check digits for 10 or 13-digit ISBNs. 
See algorithm at\n # http://en.wikipedia.org/wiki/International_Standard_Book_Number\n # @param [String] isbn The ISBN (we'll try to clean it up if possible)\n # @param [Boolean] preprocessed Set to true if the ISBN has already been through reduce_to_basics\n # @return [String,nil] the one-character checkdigit, or nil if it's not an ISBN string\n def self.checkdigit isbn, preprocessed = false\n isbn = reduce_to_basics isbn, [10,13] unless preprocessed\n return nil unless isbn\n\n checkdigit = 0\n if isbn.size == 10\n digits = isbn[0..8].split(//).map {|i| i.to_i}\n (1..9).each do |i|\n checkdigit += digits[i-1] * i\n end\n checkdigit = checkdigit % 11\n return 'X' if checkdigit == 10\n return checkdigit.to_s\n else # size == 13\n checkdigit = 0\n digits = isbn[0..11].split(//).map {|i| i.to_i}\n 6.times do\n checkdigit += digits.shift\n checkdigit += digits.shift * 3\n end\n check = 10 - (checkdigit % 10)\n check = 0 if check == 10\n return check.to_s\n end\n end\n\n # Check to see if the checkdigit is correct\n # @param [String] isbn The ISBN (we'll try to clean it up if possible)\n # @param [Boolean] preprocessed Set to true if the ISBN has already been through reduce_to_basics\n # @return [Boolean] Whether or not the checkdigit is correct. Sneakily, return 'nil' for\n # values that don't even look like ISBNs, and 'false' for those that look possible but\n # don't normalize / have bad checkdigits\n def self.valid? 
isbn, preprocessed = false\n return nil if isbn.nil?\n isbn = reduce_to_basics(isbn, [10,13]) unless preprocessed\n return nil unless isbn\n return false unless isbn[-1..-1] == self.checkdigit(isbn, true)\n return true\n end\n\n\n # For an ISBN, normalizing it is the same as converting to ISBN 13\n # and making sure it's valid\n #\n # @param [String] rawisbn The ISBN to normalize\n # @return [String, nil] the normalized (to 13 digit) ISBN, or nil on failure\n def self.normalize rawisbn\n isbn = convert_to_13 rawisbn\n if isbn\n return isbn\n else\n return nil\n end\n end\n\n # To convert to an ISBN13, throw a '978' on the front and\n # compute the checkdigit\n # We leave 13-digit numbers alone, figuring they're already ok. NO CHECKSUM CHECK IS DONE FOR 13-DIGIT ISBNS!\n # and return nil on anything that's not the right length\n # @param [String] isbn The ISBN (we'll try to clean it up if possible)\n # @return [String, nil] The converted 13-character ISBN, nil if something looks wrong, or whatever was passed in if it already looked like a 13-digit ISBN\n def self.convert_to_13 isbn\n isbn = reduce_to_basics isbn, [10,13]\n return nil unless isbn\n return nil unless valid?(isbn, true)\n return isbn if isbn.size == 13\n prefix = '978' + isbn[0..8]\n return prefix + self.checkdigit(prefix + '0', true)\n end\n\n\n # Convert to 10 if it's 13 digits and the first three digits are 978.\n # Pass through anything 10-digits, and return nil for everything else.\n # @param [String] isbn The ISBN (we'll try to clean it up if possible)\n # @return [String] The converted 10-character ISBN, nil if something looks wrong, or whatever was passed in if it already looked like a 10-digit ISBN\n def self.convert_to_10 isbn\n isbn = reduce_to_basics isbn, [10,13]\n\n # Already 10 digits? Just return\n return isbn if isbn.size == 10\n\n # Can't be converted to ISBN-10? 
Bail\n return nil unless isbn[0..2] == '978'\n\n prefix = isbn[3..11]\n return prefix + self.checkdigit(prefix + '0')\n end\n\n # Return an array of the ISBN13 and ISBN10 (in that order) for the passed in value. You'll\n # only get one value back if it's a 13-digit\n # ISBN that can't be converted to an ISBN10.\n # @param [String] isbn The original ISBN, in 10-character or 13-digit format\n # @return [Array, nil] Either the (one or two) normalized ISBNs, or nil if\n # it can't be recognized.\n #\n # @example Get the normalized values and index them (if valid) or original value (if not)\n # norms = StdNum::ISBN.allNormalizedValues(rawisbn)\n # doc['isbn'] = norms ? norms : [rawisbn]\n def self.allNormalizedValues isbn\n isbn = reduce_to_basics isbn, [10,13]\n return [] unless isbn\n case isbn.size\n when 10\n return [self.convert_to_13(isbn), isbn]\n when 13\n return [isbn, self.convert_to_10(isbn)].compact\n end\n end\n\n\n end\n\n # Validate and and normalize ISSNs\n module ISSN\n extend Helpers\n\n\n # Does it even look like an ISSN?\n def self.at_least_trying? 
issn\n return !(reduce_to_basics(issn, 8))\n end\n\n\n # Compute the checkdigit of an ISSN\n # @param [String] issn The ISSN (we'll try to clean it up if possible)\n # @param [Boolean] preprocessed Set to true if the number has already been through reduce_to_basic\n # @return [String] the one-character checkdigit\n\n def self.checkdigit issn, preprocessed = false\n issn = reduce_to_basics issn, 8 unless preprocessed\n return nil unless issn\n\n digits = issn[0..6].split(//).map {|i| i.to_i}\n checkdigit = 0\n (0..6).each do |i|\n checkdigit += digits[i] * (8 - i)\n end\n checkdigit = checkdigit % 11\n return '0' if checkdigit == 0\n checkdigit = 11 - checkdigit\n return 'X' if checkdigit == 10\n return checkdigit.to_s\n end\n\n # Check to see if the checkdigit is correct\n # @param [String] issn The ISSN (we'll try to clean it up if possible)\n # @param [Boolean] preprocessed Set to true if the number has already been through reduce_to_basic\n # @return [Boolean] Whether or not the checkdigit is correct. Sneakily, return 'nil' for\n # values that don't even look like ISBNs, and 'false' for those that look possible but\n # don't normalize / have bad checkdigits\n\n def self.valid? 
issn, preprocessed = false\n issn = reduce_to_basics issn, 8 unless preprocessed\n return nil unless issn\n return issn[-1..-1] == self.checkdigit(issn, true)\n end\n\n\n\n # Make sure it's valid, remove the dashes, uppercase the X, and return\n # @param [String] rawissn The ISSN to normalize\n # @return [String, nil] the normalized ISSN, or nil on failure\n def self.normalize rawissn\n issn = reduce_to_basics rawissn, 8\n if issn and valid?(issn, true)\n return issn\n else\n return nil\n end\n end\n\n\n\n end\n\n # Validate and and normalize LCCNs\n module LCCN\n\n\n # Get a string ready for processing as an LCCN\n # @param [String] str The possible lccn\n # @return [String] The munged string, ready for normalization\n\n def self.reduce_to_basic str\n rv = str.gsub(/\\s/, '') # ditch spaces\n rv.gsub!('http://lccn.loc.gov/', '') # remove URI prefix\n rv.gsub!(/\\/.*$/, '') # ditch everything after the first '/' (including the slash)\n return rv\n end\n\n # Normalize based on data at http://www.loc.gov/marc/lccn-namespace.html#syntax\n # @param [String] rawlccn The possible LCCN to normalize\n # @return [String, nil] the normalized LCCN, or nil if it looks malformed\n def self.normalize rawlccn\n lccn = reduce_to_basic(rawlccn)\n # If there's a dash in it, deal with that.\n if lccn =~ /^(.*?)\\-(.+)/\n pre = $1\n post = $2\n return nil unless post =~ /^\\d+$/ # must be all digits\n lccn = \"%s%06d\" % [pre, post.to_i]\n end\n\n if valid?(lccn, true)\n return lccn\n else\n return nil\n end\n end\n\n # The rules for validity according to http://www.loc.gov/marc/lccn-namespace.html#syntax:\n #\n # A normalized LCCN is a character string eight to twelve characters in length. 
(For purposes of this description characters are ordered from left to right -- \"first\" means \"leftmost\".)\n # The rightmost eight characters are always digits.\n # If the length is 9, then the first character must be alphabetic.\n # If the length is 10, then the first two characters must be either both digits or both alphabetic.\n # If the length is 11, then the first character must be alphabetic and the next two characters must be either both digits or both alphabetic.\n # If the length is 12, then the first two characters must be alphabetic and the remaining characters digits.\n #\n # @param [String] lccn The lccn to attempt to validate\n # @param [Boolean] preprocessed Set to true if the number has already been normalized\n # @return [Boolean] Whether or not the syntax seems ok\n\n def self.valid? lccn, preprocessed = false\n lccn = normalize(lccn) unless preprocessed\n return false unless lccn\n clean = lccn.gsub(/\\-/, '')\n suffix = clean[-8..-1] # \"the rightmost eight characters are always digits\"\n return false unless suffix and suffix =~ /^\\d+$/\n case clean.size # \"...is a character string eight to twelve digits in length\"\n when 8\n return true\n when 9\n return true if clean =~ /^[A-Za-z]/\n when 10\n return true if clean =~ /^\\d{2}/ or clean =~ /^[A-Za-z]{2}/\n when 11\n return true if clean =~ /^[A-Za-z](\\d{2}|[A-Za-z]{2})/\n when 12\n return true if clean =~ /^[A-Za-z]{2}\\d{2}/\n else\n return false\n end\n\n return false\n end\n\n end\n\nend\n\n", "spec/library_stdnums_spec.rb": "require 'spec_helper'\n\ndescribe \"Extract\" do\n it \"should leave a number alone\" do\n StdNum::ISBN.extractNumber('1234567').must_equal '1234567'\n end\n\n it \"should skip leading and trailing crap\" do\n StdNum::ISBN.extractNumber(' 1234567 (online)').must_equal '1234567'\n end\n\n it \"should allow hyphens\" do\n StdNum::ISBN.extractNumber(' 1-234-5').must_equal '12345'\n end\n\n it \"should return nil on a non-match\" do\n 
StdNum::ISBN.extractNumber('bill dueber').must_equal nil\n end\n\n it \"should allow a trailing X\" do\n StdNum::ISBN.extractNumber('1-234-5-X').must_equal '12345X'\n end\n\n it \"should upcase any trailing X\" do\n StdNum::ISBN.extractNumber('1-234-56-x').must_equal '123456X'\n end\n\n it \"only allows a single trailing X\" do\n StdNum::ISBN.extractNumber('123456-X-X').must_equal '123456X'\n end\n\n it \"doesn't allow numbers that are too short\" do\n StdNum::ISBN.extractNumber('12345').must_equal nil\n end\n\n let(:identifiers_string) { '9780987115423 (print ed) 9780987115430 (web ed)' }\n it \"will extract multiple identifiers\" do\n StdNum::ISBN.extract_multiple_numbers(identifiers_string).must_be_kind_of Array\n StdNum::ISBN.extract_multiple_numbers(identifiers_string).count.must_equal 2\n StdNum::ISBN.extract_multiple_numbers(identifiers_string)[0].must_equal '9780987115423'\n StdNum::ISBN.extract_multiple_numbers(identifiers_string)[1].must_equal '9780987115430'\n end\n\n let(:string_with_no_identifiers) { 'This has no identifiers' }\n it \"will return an empty array when no identifiers are in the supplied string \" do\n StdNum::ISBN.extract_multiple_numbers(string_with_no_identifiers).must_be_kind_of Array\n StdNum::ISBN.extract_multiple_numbers(string_with_no_identifiers).count.must_equal 0\n\n StdNum::ISBN.extract_multiple_numbers('').must_be_kind_of Array\n StdNum::ISBN.extract_multiple_numbers('').count.must_equal 0\n end\n it \"skips over short prefixing numbers\" do\n StdNum::ISBN.extractNumber('ISBN13: 1234567890123').must_equal '1234567890123'\n end\n\nend\n\n\ndescribe \"ISBN\" do\n it \"computes 10-digit checksum\" do\n StdNum::ISBN.checkdigit('0-306-40615-X').must_equal '2'\n end\n\n it \"correctly uses X for checksum\" do\n StdNum::ISBN.checkdigit('061871460X').must_equal 'X'\n end\n\n it \"finds a zero checkdigit\" do\n StdNum::ISBN.checkdigit('0139381430').must_equal '0'\n end\n\n it \"computes 13-digit checksum\" do\n 
StdNum::ISBN.checkdigit('9780306406157').must_equal '7'\n end\n\n it \"computes a 13-digit checksum that is 0\" do\n StdNum::ISBN.checkdigit('9783837612950').must_equal '0'\n end\n\n it \"finds a good number valid\" do\n StdNum::ISBN.valid?('9780306406157').must_equal true\n end\n\n it \"says a good number is trying\" do\n StdNum::ISBN.at_least_trying?('9780306406157').must_equal true\n end\n\n it \"says bad data is not trying\" do\n StdNum::ISBN.at_least_trying?('978006406157').must_equal false\n StdNum::ISBN.at_least_trying?('406157').must_equal false\n StdNum::ISBN.at_least_trying?('$22').must_equal false\n StdNum::ISBN.at_least_trying?('hello').must_equal false\n end\n\n\n it \"finds a bad number invalid\" do\n StdNum::ISBN.valid?('9780306406154').must_equal false\n end\n\n it \"returns nil when computing checksum for bad ISBN\" do\n StdNum::ISBN.checkdigit('12345').must_equal nil\n end\n\n it \"converts 10 to 13\" do\n StdNum::ISBN.convert_to_13('0-306-40615-2').must_equal '9780306406157'\n end\n\n it \"passes through 13 digit number instead of converting to 13\" do\n StdNum::ISBN.convert_to_13('9780306406157').must_equal '9780306406157'\n end\n\n it \"converts 13 to 10\" do\n StdNum::ISBN.convert_to_10('978-0-306-40615-7').must_equal '0306406152'\n end\n\n it \"normalizes\" do\n StdNum::ISBN.normalize('0-306-40615-2').must_equal '9780306406157'\n StdNum::ISBN.normalize('0-306-40615-X').must_equal nil\n StdNum::ISBN.normalize('ISBN: 978-0-306-40615-7').must_equal '9780306406157'\n StdNum::ISBN.normalize('ISBN: 978-0-306-40615-3').must_equal nil\n end\n\n it \"gets both normalized values\" do\n a = StdNum::ISBN.allNormalizedValues('978-0-306-40615-7')\n a.sort.must_equal ['9780306406157', '0306406152' ].sort\n\n a = StdNum::ISBN.allNormalizedValues('0-306-40615-2')\n a.sort.must_equal ['9780306406157', '0306406152' ].sort\n end\n\n\n\nend\n\n\n\ndescribe 'ISSN' do\n it \"computes checksum\" do\n StdNum::ISSN.checkdigit('0378-5955').must_equal '5'\n end\n\n it 
\"normalizes\" do\n StdNum::ISSN.normalize('0378-5955').must_equal '03785955'\n end\nend\n\n\ndescribe 'LCCN basics' do\n\n # Tests take from http://www.loc.gov/marc/lccn-namespace.html#syntax\n test = {\n \"n78-890351\" => \"n78890351\",\n \"n78-89035\" => \"n78089035\",\n \"n 78890351 \" => \"n78890351\",\n \" 85000002 \" => \"85000002\",\n \"85-2 \" => \"85000002\",\n \"2001-000002\" => \"2001000002\",\n \"75-425165//r75\" => \"75425165\",\n \" 79139101 /AC/r932\" => \"79139101\",\n }\n\n test.each do |k, v|\n it \"normalizes #{k}\" do\n StdNum::LCCN.normalize(k.dup).must_equal v\n end\n end\n\n it \"validates correctly\" do\n StdNum::LCCN.valid?(\"n78-890351\").must_equal true\n StdNum::LCCN.valid?(\"n78-89035100444\").must_equal false, \"Too long\"\n StdNum::LCCN.valid?(\"n78\").must_equal false, \"Too short\"\n StdNum::LCCN.valid?(\"na078-890351\").must_equal false, \"naa78-890351 should start with three letters or digits\"\n StdNum::LCCN.valid?(\"n078-890351\").must_equal false, \"n078-890351 should start with two letters or two digits\"\n StdNum::LCCN.valid?(\"na078-890351\").must_equal false, \"na078-890351 should start with three letters or digits\"\n StdNum::LCCN.valid?(\"0an78-890351\").must_equal false, \"0an78-890351 should start with three letters or digits\"\n StdNum::LCCN.valid?(\"n78-89c0351\").must_equal false, \"n78-89c0351 has a letter after the dash\"\n end\n\n\nend\n\n\ndescribe \"LCCN tests from Business::LCCN perl module\" do\n tests = [\n { :orig => 'n78-890351',\n :canonical => 'n 78890351 ',\n :normalized => 'n78890351',\n :prefix => 'n',\n :year_cataloged => 1978,\n :serial => '890351',\n },\n { :orig => 'n 78890351 ',\n :canonical => 'n 78890351 ',\n :normalized => 'n78890351',\n :prefix => 'n',\n :year_cataloged => 1978,\n :serial => '890351',\n },\n { :orig => ' 85000002 ',\n :canonical => ' 85000002 ',\n :normalized => '85000002',\n :year_cataloged => 1985,\n :serial => '000002',\n },\n { :orig => '85-2 ',\n :canonical => ' 85000002 
',\n :normalized => '85000002',\n :year_cataloged => 1985,\n :serial => '000002',\n },\n { :orig => '2001-000002',\n :canonical => ' 2001000002',\n :normalized => '2001000002',\n :year_cataloged => 2001,\n :serial => '000002',\n },\n { :orig => '75-425165//r75',\n :canonical => ' 75425165 //r75',\n :normalized => '75425165',\n :prefix => '',\n :year_cataloged => nil,\n :serial => '425165',\n :revision_year => 1975,\n :revision_year_encoded => '75',\n :revision_number => nil,\n },\n { :orig => ' 79139101 /AC/r932',\n :canonical => ' 79139101 /AC/r932',\n :normalized => '79139101',\n :prefix => '',\n :year_cataloged => nil,\n :serial => '139101',\n :suffix_encoded => '/AC',\n :revision_year => 1993,\n :revision_year_encoded => '93',\n :revision_number => 2,\n },\n { :orig => '89-4',\n :canonical => ' 89000004 ',\n :normalized => '89000004',\n :year_cataloged => 1989,\n :serial => '000004',\n },\n { :orig => '89-45',\n :canonical => ' 89000045 ',\n :normalized => '89000045',\n :year_cataloged => 1989,\n :serial => '000045',\n },\n { :orig => '89-456',\n :canonical => ' 89000456 ',\n :normalized => '89000456',\n :year_cataloged => 1989,\n :serial => '000456',\n },\n { :orig => '89-1234',\n :canonical => ' 89001234 ',\n :normalized => '89001234',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => '89-001234',\n :canonical => ' 89001234 ',\n :normalized => '89001234',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => '89001234',\n :canonical => ' 89001234 ',\n :normalized => '89001234',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => '2002-1234',\n :canonical => ' 2002001234',\n :normalized => '2002001234',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => '2002-001234',\n :canonical => ' 2002001234',\n :normalized => '2002001234',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => '2002001234',\n :canonical => ' 2002001234',\n :normalized => '2002001234',\n :year_cataloged => 
2002,\n :serial => '001234',\n },\n { :orig => ' 89001234 ',\n :canonical => ' 89001234 ',\n :normalized => '89001234',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => ' 2002001234',\n :canonical => ' 2002001234',\n :normalized => '2002001234',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'a89-1234',\n :canonical => 'a 89001234 ',\n :normalized => 'a89001234',\n :prefix => 'a',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'a89-001234',\n :canonical => 'a 89001234 ',\n :normalized => 'a89001234',\n :prefix => 'a',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'a89001234',\n :canonical => 'a 89001234 ',\n :normalized => 'a89001234',\n :prefix => 'a',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'a2002-1234',\n :canonical => 'a 2002001234',\n :normalized => 'a2002001234',\n :prefix => 'a',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'a2002-001234',\n :canonical => 'a 2002001234',\n :normalized => 'a2002001234',\n :prefix => 'a',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'a2002001234',\n :canonical => 'a 2002001234',\n :normalized => 'a2002001234',\n :prefix => 'a',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'a 89001234 ',\n :canonical => 'a 89001234 ',\n :normalized => 'a89001234',\n :prefix => 'a',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'a 89-001234 ',\n :canonical => 'a 89001234 ',\n :normalized => 'a89001234',\n :prefix => 'a',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'a 2002001234',\n :canonical => 'a 2002001234',\n :normalized => 'a2002001234',\n :prefix => 'a',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'ab89-1234',\n :canonical => 'ab 89001234 ',\n :normalized => 'ab89001234',\n :prefix => 'ab',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'ab89-001234',\n :canonical => 'ab 
89001234 ',\n :normalized => 'ab89001234',\n :prefix => 'ab',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'ab89001234',\n :canonical => 'ab 89001234 ',\n :normalized => 'ab89001234',\n :prefix => 'ab',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'ab2002-1234',\n :canonical => 'ab2002001234',\n :normalized => 'ab2002001234',\n :prefix => 'ab',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'ab2002-001234',\n :canonical => 'ab2002001234',\n :normalized => 'ab2002001234',\n :prefix => 'ab',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'ab2002001234',\n :canonical => 'ab2002001234',\n :normalized => 'ab2002001234',\n :prefix => 'ab',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'ab 89001234 ',\n :canonical => 'ab 89001234 ',\n :normalized => 'ab89001234',\n :prefix => 'ab',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'ab 2002001234',\n :canonical => 'ab2002001234',\n :normalized => 'ab2002001234',\n :prefix => 'ab',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'ab 89-1234',\n :canonical => 'ab 89001234 ',\n :normalized => 'ab89001234',\n :prefix => 'ab',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'abc89-1234',\n :canonical => 'abc89001234 ',\n :normalized => 'abc89001234',\n :prefix => 'abc',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'abc89-001234',\n :canonical => 'abc89001234 ',\n :normalized => 'abc89001234',\n :prefix => 'abc',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'abc89001234',\n :canonical => 'abc89001234 ',\n :normalized => 'abc89001234',\n :prefix => 'abc',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'abc89001234 ',\n :canonical => 'abc89001234 ',\n :normalized => 'abc89001234',\n :prefix => 'abc',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'http://lccn.loc.gov/89001234',\n 
:canonical => ' 89001234 ',\n :normalized => '89001234',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'http://lccn.loc.gov/a89001234',\n :canonical => 'a 89001234 ',\n :normalized => 'a89001234',\n :serial => '001234',\n :prefix => 'a',\n :year_cataloged => 1989,\n },\n { :orig => 'http://lccn.loc.gov/ab89001234',\n :canonical => 'ab 89001234 ',\n :normalized => 'ab89001234',\n :prefix => 'ab',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'http://lccn.loc.gov/abc89001234',\n :canonical => 'abc89001234 ',\n :normalized => 'abc89001234',\n :prefix => 'abc',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'http://lccn.loc.gov/2002001234',\n :canonical => ' 2002001234',\n :normalized => '2002001234',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'http://lccn.loc.gov/a2002001234',\n :canonical => 'a 2002001234',\n :normalized => 'a2002001234',\n :prefix => 'a',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'http://lccn.loc.gov/ab2002001234',\n :canonical => 'ab2002001234',\n :normalized => 'ab2002001234',\n :prefix => 'ab',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => '00-21595',\n :canonical => ' 00021595 ',\n :normalized => '00021595',\n :year_cataloged => 2000,\n :serial => '021595',\n },\n { :orig => '2001001599',\n :canonical => ' 2001001599',\n :normalized => '2001001599',\n :year_cataloged => 2001,\n :serial => '001599',\n },\n { :orig => '99-18233',\n :canonical => ' 99018233 ',\n :normalized => '99018233',\n :year_cataloged => 1999,\n :serial => '018233',\n },\n { :orig => '98000595',\n :canonical => ' 98000595 ',\n :normalized => '98000595',\n :year_cataloged => 1898,\n :serial => '000595',\n },\n { :orig => '99005074',\n :canonical => ' 99005074 ',\n :normalized => '99005074',\n :year_cataloged => 1899,\n :serial => '005074',\n },\n { :orig => '00003373',\n :canonical => ' 00003373 ',\n :normalized => '00003373',\n 
:year_cataloged => 1900,\n :serial => '003373',\n },\n { :orig => '01001599',\n :canonical => ' 01001599 ',\n :normalized => '01001599',\n :year_cataloged => 1901,\n :serial => '001599',\n },\n { :orig => ' 95156543 ',\n :canonical => ' 95156543 ',\n :normalized => '95156543',\n :year_cataloged => 1995,\n :serial => '156543',\n },\n { :orig => ' 94014580 /AC/r95',\n :canonical => ' 94014580 /AC/r95',\n :normalized => '94014580',\n :year_cataloged => 1994,\n :serial => '014580',\n :suffix_encoded => '/AC',\n :revision_year_encoded => '95',\n :revision_year => 1995,\n },\n { :orig => ' 79310919 //r86',\n :canonical => ' 79310919 //r86',\n :normalized => '79310919',\n :year_cataloged => 1979,\n :serial => '310919',\n :revision_year_encoded => '86',\n :revision_year => 1986,\n },\n { :orig => 'gm 71005810 ',\n :canonical => 'gm 71005810 ',\n :normalized => 'gm71005810',\n :prefix => 'gm',\n :year_cataloged => 1971,\n :serial => '005810',\n },\n { :orig => 'sn2006058112 ',\n :canonical => 'sn2006058112',\n :normalized => 'sn2006058112',\n :prefix => 'sn',\n :year_cataloged => 2006,\n :serial => '058112',\n },\n { :orig => 'gm 71-2450',\n :canonical => 'gm 71002450 ',\n :normalized => 'gm71002450',\n :prefix => 'gm',\n :year_cataloged => 1971,\n :serial => '002450',\n },\n { :orig => '2001-1114',\n :canonical => ' 2001001114',\n :normalized => '2001001114',\n :year_cataloged => 2001,\n :serial => '001114',\n },\n ]\n tests.each do |h|\n it \"normalizes #{h[:orig]}\" do\n StdNum::LCCN.normalize(h[:orig]).must_equal h[:normalized], \"#{h[:orig]} doesn't normalize to #{h[:normalized]}\"\n end\n end\nend\n\n"}, "files_after": {"lib/library_stdnums.rb": "# Static Module functions to work with library \"standard numbers\" ISSN, ISBN, and LCCN\nmodule StdNum\n\n # Helper methods common to ISBN/ISSN\n module Helpers\n\n # The pattern we use to try and find an ISBN/ISSN. 
Ditch everthing before the first\n # digit, then take all the digits/hyphens, optionally followed by an 'X'\n # Since the shortest possible string is 7 digits followed by a checksum digit\n # for an ISSN, we'll make sure they're at least that long. Still imperfect\n # (would fine \"5------\", for example) but should work in most cases.\n STDNUMPAT = /^.*?(\\d[\\d\\-]{6,}[xX]?)/\n\n # Extract the most likely looking number from the string. This will be the first\n # string of digits-and-hyphens-and-maybe-a-trailing-X, with the hypens removed\n # @param [String] str The string from which to extract an ISBN/ISSN\n # @return [String] The extracted identifier\n def extractNumber str\n match = STDNUMPAT.match str\n return nil unless match\n return (match[1].gsub(/\\-/, '')).upcase\n end\n\n # Same as STDNUMPAT but allowing for all numbers in the provided string\n STDNUMPAT_MULTIPLE = /.*?(\\d[\\d\\-]{6,}[xX]?)/\n\n # Extract the most likely looking numbers from the string. This will be each\n # string with digits-and-hyphens-and-maybe-a-trailing-X, with the hypens removed\n # @param [String] str The string from which to extract the ISBN/ISSNs\n # @return [Array] An array of extracted identifiers\n def extract_multiple_numbers(str)\n return [] if str == '' || str.nil?\n str.scan(STDNUMPAT_MULTIPLE).flatten.map{ |i| i.gsub(/\\-/, '').upcase }\n end\n\n # Given any string, extract what looks like the most likely ISBN/ISSN\n # of the given size(s), or nil if nothing matches at the correct size.\n # @param [String] rawnum The raw string containing (hopefully) an ISSN/ISBN\n # @param [Integer, Array, nil] valid_sizes An integer or array of integers of valid sizes\n # for this type (e.g., 10 or 13 for ISBN, 8 for ISSN)\n # @return [String,nil] the reduced and verified number, or nil if there's no match at the right size\n def reduce_to_basics rawnum, valid_sizes = nil\n return nil if rawnum.nil?\n\n num = extractNumber rawnum\n\n # Does it even look like a number?\n return nil 
unless num\n\n # Return what we've got if we don't care about the size\n return num unless valid_sizes\n\n # Check for valid size(s)\n [valid_sizes].flatten.each do |s|\n return num if num.size == s\n end\n\n # Didn't check out size-wise. Return nil\n return nil\n end\n end\n\n # Validate, convert, and normalize ISBNs (10-digit or 13-digit)\n module ISBN\n extend Helpers\n\n # Does it even look like an ISBN?\n def self.at_least_trying? isbn\n reduce_to_basics(isbn, [10,13]) ? true : false\n end\n\n\n # Compute check digits for 10 or 13-digit ISBNs. See algorithm at\n # http://en.wikipedia.org/wiki/International_Standard_Book_Number\n # @param [String] isbn The ISBN (we'll try to clean it up if possible)\n # @param [Boolean] preprocessed Set to true if the ISBN has already been through reduce_to_basics\n # @return [String,nil] the one-character checkdigit, or nil if it's not an ISBN string\n def self.checkdigit isbn, preprocessed = false\n isbn = reduce_to_basics isbn, [10,13] unless preprocessed\n return nil unless isbn\n\n checkdigit = 0\n if isbn.size == 10\n digits = isbn[0..8].split(//).map {|i| i.to_i}\n (1..9).each do |i|\n checkdigit += digits[i-1] * i\n end\n checkdigit = checkdigit % 11\n return 'X' if checkdigit == 10\n return checkdigit.to_s\n else # size == 13\n checkdigit = 0\n digits = isbn[0..11].split(//).map {|i| i.to_i}\n 6.times do\n checkdigit += digits.shift\n checkdigit += digits.shift * 3\n end\n check = 10 - (checkdigit % 10)\n check = 0 if check == 10\n return check.to_s\n end\n end\n\n # Check to see if the checkdigit is correct\n # @param [String] isbn The ISBN (we'll try to clean it up if possible)\n # @param [Boolean] preprocessed Set to true if the ISBN has already been through reduce_to_basics\n # @return [Boolean] Whether or not the checkdigit is correct. Sneakily, return 'nil' for\n # values that don't even look like ISBNs, and 'false' for those that look possible but\n # don't normalize / have bad checkdigits\n def self.valid? 
isbn, preprocessed = false\n return nil if isbn.nil?\n isbn = reduce_to_basics(isbn, [10,13]) unless preprocessed\n return nil unless isbn\n return false unless isbn[-1..-1] == self.checkdigit(isbn, true)\n return false unless isbn.size == 10 || valid_isbn13_prefix?(isbn)\n return true\n end\n\n\n # For an ISBN, normalizing it is the same as converting to ISBN 13\n # and making sure it's valid\n #\n # @param [String] rawisbn The ISBN to normalize\n # @return [String, nil] the normalized (to 13 digit) ISBN, or nil on failure\n def self.normalize rawisbn\n isbn = convert_to_13 rawisbn\n if isbn\n return isbn\n else\n return nil\n end\n end\n\n # To convert to an ISBN13, throw a '978' on the front and\n # compute the checkdigit\n # We leave 13-digit numbers alone, figuring they're already ok. NO CHECKSUM CHECK IS DONE FOR 13-DIGIT ISBNS!\n # and return nil on anything that's not the right length\n # @param [String] isbn The ISBN (we'll try to clean it up if possible)\n # @return [String, nil] The converted 13-character ISBN, nil if something looks wrong, or whatever was passed in if it already looked like a 13-digit ISBN\n def self.convert_to_13 isbn\n isbn = reduce_to_basics isbn, [10,13]\n return nil unless isbn\n return nil unless valid?(isbn, true)\n return isbn if isbn.size == 13\n prefix = '978' + isbn[0..8]\n return prefix + self.checkdigit(prefix + '0', true)\n end\n\n\n # Convert to 10 if it's 13 digits and the first three digits are 978.\n # Pass through anything 10-digits, and return nil for everything else.\n # @param [String] isbn The ISBN (we'll try to clean it up if possible)\n # @return [String] The converted 10-character ISBN, nil if something looks wrong, or whatever was passed in if it already looked like a 10-digit ISBN\n def self.convert_to_10 isbn\n isbn = reduce_to_basics isbn, [10,13]\n\n # Already 10 digits? Just return\n return isbn if isbn.size == 10\n\n # Can't be converted to ISBN-10? 
Bail\n return nil unless isbn[0..2] == '978'\n\n prefix = isbn[3..11]\n return prefix + self.checkdigit(prefix + '0')\n end\n\n # Return an array of the ISBN13 and ISBN10 (in that order) for the passed in value. You'll\n # only get one value back if it's a 13-digit\n # ISBN that can't be converted to an ISBN10.\n # @param [String] isbn The original ISBN, in 10-character or 13-digit format\n # @return [Array, nil] Either the (one or two) normalized ISBNs, or nil if\n # it can't be recognized.\n #\n # @example Get the normalized values and index them (if valid) or original value (if not)\n # norms = StdNum::ISBN.allNormalizedValues(rawisbn)\n # doc['isbn'] = norms ? norms : [rawisbn]\n def self.allNormalizedValues isbn\n isbn = reduce_to_basics isbn, [10,13]\n return [] unless isbn\n case isbn.size\n when 10\n return [self.convert_to_13(isbn), isbn]\n when 13\n return [isbn, self.convert_to_10(isbn)].compact\n end\n end\n\n # Checks for a valid ISBN13 prefix\n # ISBN13 always starts with 978 or 979. For example: 1000000000012 has a valid check digit, but\n # is not a valid ISBN13.\n # @param [String] isbn13 The ISBN13 to be checked.\n # @return [Boolean] If true then the prefix is valid\n def self.valid_isbn13_prefix?(isbn13)\n return false unless isbn13.size == 13\n ['978', '979'].map { |prefix| isbn13.start_with?(prefix) }.any?\n end\n end\n\n # Validate and and normalize ISSNs\n module ISSN\n extend Helpers\n\n\n # Does it even look like an ISSN?\n def self.at_least_trying? 
issn\n return !(reduce_to_basics(issn, 8))\n end\n\n\n # Compute the checkdigit of an ISSN\n # @param [String] issn The ISSN (we'll try to clean it up if possible)\n # @param [Boolean] preprocessed Set to true if the number has already been through reduce_to_basic\n # @return [String] the one-character checkdigit\n\n def self.checkdigit issn, preprocessed = false\n issn = reduce_to_basics issn, 8 unless preprocessed\n return nil unless issn\n\n digits = issn[0..6].split(//).map {|i| i.to_i}\n checkdigit = 0\n (0..6).each do |i|\n checkdigit += digits[i] * (8 - i)\n end\n checkdigit = checkdigit % 11\n return '0' if checkdigit == 0\n checkdigit = 11 - checkdigit\n return 'X' if checkdigit == 10\n return checkdigit.to_s\n end\n\n # Check to see if the checkdigit is correct\n # @param [String] issn The ISSN (we'll try to clean it up if possible)\n # @param [Boolean] preprocessed Set to true if the number has already been through reduce_to_basic\n # @return [Boolean] Whether or not the checkdigit is correct. Sneakily, return 'nil' for\n # values that don't even look like ISBNs, and 'false' for those that look possible but\n # don't normalize / have bad checkdigits\n\n def self.valid? 
issn, preprocessed = false\n issn = reduce_to_basics issn, 8 unless preprocessed\n return nil unless issn\n return issn[-1..-1] == self.checkdigit(issn, true)\n end\n\n\n\n # Make sure it's valid, remove the dashes, uppercase the X, and return\n # @param [String] rawissn The ISSN to normalize\n # @return [String, nil] the normalized ISSN, or nil on failure\n def self.normalize rawissn\n issn = reduce_to_basics rawissn, 8\n if issn and valid?(issn, true)\n return issn\n else\n return nil\n end\n end\n\n\n\n end\n\n # Validate and and normalize LCCNs\n module LCCN\n\n\n # Get a string ready for processing as an LCCN\n # @param [String] str The possible lccn\n # @return [String] The munged string, ready for normalization\n\n def self.reduce_to_basic str\n rv = str.gsub(/\\s/, '') # ditch spaces\n rv.gsub!('http://lccn.loc.gov/', '') # remove URI prefix\n rv.gsub!(/\\/.*$/, '') # ditch everything after the first '/' (including the slash)\n return rv\n end\n\n # Normalize based on data at http://www.loc.gov/marc/lccn-namespace.html#syntax\n # @param [String] rawlccn The possible LCCN to normalize\n # @return [String, nil] the normalized LCCN, or nil if it looks malformed\n def self.normalize rawlccn\n lccn = reduce_to_basic(rawlccn)\n # If there's a dash in it, deal with that.\n if lccn =~ /^(.*?)\\-(.+)/\n pre = $1\n post = $2\n return nil unless post =~ /^\\d+$/ # must be all digits\n lccn = \"%s%06d\" % [pre, post.to_i]\n end\n\n if valid?(lccn, true)\n return lccn\n else\n return nil\n end\n end\n\n # The rules for validity according to http://www.loc.gov/marc/lccn-namespace.html#syntax:\n #\n # A normalized LCCN is a character string eight to twelve characters in length. 
(For purposes of this description characters are ordered from left to right -- \"first\" means \"leftmost\".)\n # The rightmost eight characters are always digits.\n # If the length is 9, then the first character must be alphabetic.\n # If the length is 10, then the first two characters must be either both digits or both alphabetic.\n # If the length is 11, then the first character must be alphabetic and the next two characters must be either both digits or both alphabetic.\n # If the length is 12, then the first two characters must be alphabetic and the remaining characters digits.\n #\n # @param [String] lccn The lccn to attempt to validate\n # @param [Boolean] preprocessed Set to true if the number has already been normalized\n # @return [Boolean] Whether or not the syntax seems ok\n\n def self.valid? lccn, preprocessed = false\n lccn = normalize(lccn) unless preprocessed\n return false unless lccn\n clean = lccn.gsub(/\\-/, '')\n suffix = clean[-8..-1] # \"the rightmost eight characters are always digits\"\n return false unless suffix and suffix =~ /^\\d+$/\n case clean.size # \"...is a character string eight to twelve digits in length\"\n when 8\n return true\n when 9\n return true if clean =~ /^[A-Za-z]/\n when 10\n return true if clean =~ /^\\d{2}/ or clean =~ /^[A-Za-z]{2}/\n when 11\n return true if clean =~ /^[A-Za-z](\\d{2}|[A-Za-z]{2})/\n when 12\n return true if clean =~ /^[A-Za-z]{2}\\d{2}/\n else\n return false\n end\n\n return false\n end\n\n end\n\nend\n\n", "spec/library_stdnums_spec.rb": "require 'spec_helper'\n\ndescribe \"Extract\" do\n it \"should leave a number alone\" do\n StdNum::ISBN.extractNumber('1234567').must_equal '1234567'\n end\n\n it \"should skip leading and trailing crap\" do\n StdNum::ISBN.extractNumber(' 1234567 (online)').must_equal '1234567'\n end\n\n it \"should allow hyphens\" do\n StdNum::ISBN.extractNumber(' 1-234-5').must_equal '12345'\n end\n\n it \"should return nil on a non-match\" do\n 
StdNum::ISBN.extractNumber('bill dueber').must_equal nil\n end\n\n it \"should allow a trailing X\" do\n StdNum::ISBN.extractNumber('1-234-5-X').must_equal '12345X'\n end\n\n it \"should upcase any trailing X\" do\n StdNum::ISBN.extractNumber('1-234-56-x').must_equal '123456X'\n end\n\n it \"only allows a single trailing X\" do\n StdNum::ISBN.extractNumber('123456-X-X').must_equal '123456X'\n end\n\n it \"doesn't allow numbers that are too short\" do\n StdNum::ISBN.extractNumber('12345').must_equal nil\n end\n\n let(:identifiers_string) { '9780987115423 (print ed) 9780987115430 (web ed)' }\n it \"will extract multiple identifiers\" do\n StdNum::ISBN.extract_multiple_numbers(identifiers_string).must_be_kind_of Array\n StdNum::ISBN.extract_multiple_numbers(identifiers_string).count.must_equal 2\n StdNum::ISBN.extract_multiple_numbers(identifiers_string)[0].must_equal '9780987115423'\n StdNum::ISBN.extract_multiple_numbers(identifiers_string)[1].must_equal '9780987115430'\n end\n\n let(:string_with_no_identifiers) { 'This has no identifiers' }\n it \"will return an empty array when no identifiers are in the supplied string \" do\n StdNum::ISBN.extract_multiple_numbers(string_with_no_identifiers).must_be_kind_of Array\n StdNum::ISBN.extract_multiple_numbers(string_with_no_identifiers).count.must_equal 0\n\n StdNum::ISBN.extract_multiple_numbers('').must_be_kind_of Array\n StdNum::ISBN.extract_multiple_numbers('').count.must_equal 0\n end\n it \"skips over short prefixing numbers\" do\n StdNum::ISBN.extractNumber('ISBN13: 1234567890123').must_equal '1234567890123'\n end\n\nend\n\n\ndescribe \"ISBN\" do\n it \"computes 10-digit checksum\" do\n StdNum::ISBN.checkdigit('0-306-40615-X').must_equal '2'\n end\n\n it \"correctly uses X for checksum\" do\n StdNum::ISBN.checkdigit('061871460X').must_equal 'X'\n end\n\n it \"finds a zero checkdigit\" do\n StdNum::ISBN.checkdigit('0139381430').must_equal '0'\n end\n\n it \"computes 13-digit checksum\" do\n 
StdNum::ISBN.checkdigit('9780306406157').must_equal '7'\n end\n\n it \"computes a 13-digit checksum that is 0\" do\n StdNum::ISBN.checkdigit('9783837612950').must_equal '0'\n end\n\n it \"finds a good number valid\" do\n StdNum::ISBN.valid?('9780306406157').must_equal true\n end\n\n it \"says a good number is trying\" do\n StdNum::ISBN.at_least_trying?('9780306406157').must_equal true\n end\n\n it \"says bad data is not trying\" do\n StdNum::ISBN.at_least_trying?('978006406157').must_equal false\n StdNum::ISBN.at_least_trying?('406157').must_equal false\n StdNum::ISBN.at_least_trying?('$22').must_equal false\n StdNum::ISBN.at_least_trying?('hello').must_equal false\n end\n\n\n it \"finds a bad number invalid\" do\n StdNum::ISBN.valid?('9780306406154').must_equal false\n end\n\n it \"returns nil when computing checksum for bad ISBN\" do\n StdNum::ISBN.checkdigit('12345').must_equal nil\n end\n\n it \"converts 10 to 13\" do\n StdNum::ISBN.convert_to_13('0-306-40615-2').must_equal '9780306406157'\n end\n\n it \"passes through 13 digit number instead of converting to 13\" do\n StdNum::ISBN.convert_to_13('9780306406157').must_equal '9780306406157'\n end\n\n it \"converts 13 to 10\" do\n StdNum::ISBN.convert_to_10('978-0-306-40615-7').must_equal '0306406152'\n end\n\n it \"normalizes\" do\n StdNum::ISBN.normalize('0-306-40615-2').must_equal '9780306406157'\n StdNum::ISBN.normalize('0-306-40615-X').must_equal nil\n StdNum::ISBN.normalize('ISBN: 978-0-306-40615-7').must_equal '9780306406157'\n StdNum::ISBN.normalize('ISBN: 978-0-306-40615-3').must_equal nil\n end\n\n it \"gets both normalized values\" do\n a = StdNum::ISBN.allNormalizedValues('978-0-306-40615-7')\n a.sort.must_equal ['9780306406157', '0306406152' ].sort\n\n a = StdNum::ISBN.allNormalizedValues('0-306-40615-2')\n a.sort.must_equal ['9780306406157', '0306406152' ].sort\n end\n\n it \"identifies an invalid ISBN13 due to the prefix\" do\n StdNum::ISBN.valid_isbn13_prefix?('9780000000002').must_equal true\n 
StdNum::ISBN.valid?('1000000000012').must_equal false\n end\n\nend\n\n\n\ndescribe 'ISSN' do\n it \"computes checksum\" do\n StdNum::ISSN.checkdigit('0378-5955').must_equal '5'\n end\n\n it \"normalizes\" do\n StdNum::ISSN.normalize('0378-5955').must_equal '03785955'\n end\nend\n\n\ndescribe 'LCCN basics' do\n\n # Tests take from http://www.loc.gov/marc/lccn-namespace.html#syntax\n test = {\n \"n78-890351\" => \"n78890351\",\n \"n78-89035\" => \"n78089035\",\n \"n 78890351 \" => \"n78890351\",\n \" 85000002 \" => \"85000002\",\n \"85-2 \" => \"85000002\",\n \"2001-000002\" => \"2001000002\",\n \"75-425165//r75\" => \"75425165\",\n \" 79139101 /AC/r932\" => \"79139101\",\n }\n\n test.each do |k, v|\n it \"normalizes #{k}\" do\n StdNum::LCCN.normalize(k.dup).must_equal v\n end\n end\n\n it \"validates correctly\" do\n StdNum::LCCN.valid?(\"n78-890351\").must_equal true\n StdNum::LCCN.valid?(\"n78-89035100444\").must_equal false, \"Too long\"\n StdNum::LCCN.valid?(\"n78\").must_equal false, \"Too short\"\n StdNum::LCCN.valid?(\"na078-890351\").must_equal false, \"naa78-890351 should start with three letters or digits\"\n StdNum::LCCN.valid?(\"n078-890351\").must_equal false, \"n078-890351 should start with two letters or two digits\"\n StdNum::LCCN.valid?(\"na078-890351\").must_equal false, \"na078-890351 should start with three letters or digits\"\n StdNum::LCCN.valid?(\"0an78-890351\").must_equal false, \"0an78-890351 should start with three letters or digits\"\n StdNum::LCCN.valid?(\"n78-89c0351\").must_equal false, \"n78-89c0351 has a letter after the dash\"\n end\n\n\nend\n\n\ndescribe \"LCCN tests from Business::LCCN perl module\" do\n tests = [\n { :orig => 'n78-890351',\n :canonical => 'n 78890351 ',\n :normalized => 'n78890351',\n :prefix => 'n',\n :year_cataloged => 1978,\n :serial => '890351',\n },\n { :orig => 'n 78890351 ',\n :canonical => 'n 78890351 ',\n :normalized => 'n78890351',\n :prefix => 'n',\n :year_cataloged => 1978,\n :serial => '890351',\n 
},\n { :orig => ' 85000002 ',\n :canonical => ' 85000002 ',\n :normalized => '85000002',\n :year_cataloged => 1985,\n :serial => '000002',\n },\n { :orig => '85-2 ',\n :canonical => ' 85000002 ',\n :normalized => '85000002',\n :year_cataloged => 1985,\n :serial => '000002',\n },\n { :orig => '2001-000002',\n :canonical => ' 2001000002',\n :normalized => '2001000002',\n :year_cataloged => 2001,\n :serial => '000002',\n },\n { :orig => '75-425165//r75',\n :canonical => ' 75425165 //r75',\n :normalized => '75425165',\n :prefix => '',\n :year_cataloged => nil,\n :serial => '425165',\n :revision_year => 1975,\n :revision_year_encoded => '75',\n :revision_number => nil,\n },\n { :orig => ' 79139101 /AC/r932',\n :canonical => ' 79139101 /AC/r932',\n :normalized => '79139101',\n :prefix => '',\n :year_cataloged => nil,\n :serial => '139101',\n :suffix_encoded => '/AC',\n :revision_year => 1993,\n :revision_year_encoded => '93',\n :revision_number => 2,\n },\n { :orig => '89-4',\n :canonical => ' 89000004 ',\n :normalized => '89000004',\n :year_cataloged => 1989,\n :serial => '000004',\n },\n { :orig => '89-45',\n :canonical => ' 89000045 ',\n :normalized => '89000045',\n :year_cataloged => 1989,\n :serial => '000045',\n },\n { :orig => '89-456',\n :canonical => ' 89000456 ',\n :normalized => '89000456',\n :year_cataloged => 1989,\n :serial => '000456',\n },\n { :orig => '89-1234',\n :canonical => ' 89001234 ',\n :normalized => '89001234',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => '89-001234',\n :canonical => ' 89001234 ',\n :normalized => '89001234',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => '89001234',\n :canonical => ' 89001234 ',\n :normalized => '89001234',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => '2002-1234',\n :canonical => ' 2002001234',\n :normalized => '2002001234',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => '2002-001234',\n :canonical => ' 2002001234',\n 
:normalized => '2002001234',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => '2002001234',\n :canonical => ' 2002001234',\n :normalized => '2002001234',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => ' 89001234 ',\n :canonical => ' 89001234 ',\n :normalized => '89001234',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => ' 2002001234',\n :canonical => ' 2002001234',\n :normalized => '2002001234',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'a89-1234',\n :canonical => 'a 89001234 ',\n :normalized => 'a89001234',\n :prefix => 'a',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'a89-001234',\n :canonical => 'a 89001234 ',\n :normalized => 'a89001234',\n :prefix => 'a',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'a89001234',\n :canonical => 'a 89001234 ',\n :normalized => 'a89001234',\n :prefix => 'a',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'a2002-1234',\n :canonical => 'a 2002001234',\n :normalized => 'a2002001234',\n :prefix => 'a',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'a2002-001234',\n :canonical => 'a 2002001234',\n :normalized => 'a2002001234',\n :prefix => 'a',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'a2002001234',\n :canonical => 'a 2002001234',\n :normalized => 'a2002001234',\n :prefix => 'a',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'a 89001234 ',\n :canonical => 'a 89001234 ',\n :normalized => 'a89001234',\n :prefix => 'a',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'a 89-001234 ',\n :canonical => 'a 89001234 ',\n :normalized => 'a89001234',\n :prefix => 'a',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'a 2002001234',\n :canonical => 'a 2002001234',\n :normalized => 'a2002001234',\n :prefix => 'a',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 
'ab89-1234',\n :canonical => 'ab 89001234 ',\n :normalized => 'ab89001234',\n :prefix => 'ab',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'ab89-001234',\n :canonical => 'ab 89001234 ',\n :normalized => 'ab89001234',\n :prefix => 'ab',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'ab89001234',\n :canonical => 'ab 89001234 ',\n :normalized => 'ab89001234',\n :prefix => 'ab',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'ab2002-1234',\n :canonical => 'ab2002001234',\n :normalized => 'ab2002001234',\n :prefix => 'ab',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'ab2002-001234',\n :canonical => 'ab2002001234',\n :normalized => 'ab2002001234',\n :prefix => 'ab',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'ab2002001234',\n :canonical => 'ab2002001234',\n :normalized => 'ab2002001234',\n :prefix => 'ab',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'ab 89001234 ',\n :canonical => 'ab 89001234 ',\n :normalized => 'ab89001234',\n :prefix => 'ab',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'ab 2002001234',\n :canonical => 'ab2002001234',\n :normalized => 'ab2002001234',\n :prefix => 'ab',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'ab 89-1234',\n :canonical => 'ab 89001234 ',\n :normalized => 'ab89001234',\n :prefix => 'ab',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'abc89-1234',\n :canonical => 'abc89001234 ',\n :normalized => 'abc89001234',\n :prefix => 'abc',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'abc89-001234',\n :canonical => 'abc89001234 ',\n :normalized => 'abc89001234',\n :prefix => 'abc',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'abc89001234',\n :canonical => 'abc89001234 ',\n :normalized => 'abc89001234',\n :prefix => 'abc',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 
'abc89001234 ',\n :canonical => 'abc89001234 ',\n :normalized => 'abc89001234',\n :prefix => 'abc',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'http://lccn.loc.gov/89001234',\n :canonical => ' 89001234 ',\n :normalized => '89001234',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'http://lccn.loc.gov/a89001234',\n :canonical => 'a 89001234 ',\n :normalized => 'a89001234',\n :serial => '001234',\n :prefix => 'a',\n :year_cataloged => 1989,\n },\n { :orig => 'http://lccn.loc.gov/ab89001234',\n :canonical => 'ab 89001234 ',\n :normalized => 'ab89001234',\n :prefix => 'ab',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'http://lccn.loc.gov/abc89001234',\n :canonical => 'abc89001234 ',\n :normalized => 'abc89001234',\n :prefix => 'abc',\n :year_cataloged => 1989,\n :serial => '001234',\n },\n { :orig => 'http://lccn.loc.gov/2002001234',\n :canonical => ' 2002001234',\n :normalized => '2002001234',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'http://lccn.loc.gov/a2002001234',\n :canonical => 'a 2002001234',\n :normalized => 'a2002001234',\n :prefix => 'a',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => 'http://lccn.loc.gov/ab2002001234',\n :canonical => 'ab2002001234',\n :normalized => 'ab2002001234',\n :prefix => 'ab',\n :year_cataloged => 2002,\n :serial => '001234',\n },\n { :orig => '00-21595',\n :canonical => ' 00021595 ',\n :normalized => '00021595',\n :year_cataloged => 2000,\n :serial => '021595',\n },\n { :orig => '2001001599',\n :canonical => ' 2001001599',\n :normalized => '2001001599',\n :year_cataloged => 2001,\n :serial => '001599',\n },\n { :orig => '99-18233',\n :canonical => ' 99018233 ',\n :normalized => '99018233',\n :year_cataloged => 1999,\n :serial => '018233',\n },\n { :orig => '98000595',\n :canonical => ' 98000595 ',\n :normalized => '98000595',\n :year_cataloged => 1898,\n :serial => '000595',\n },\n { :orig => '99005074',\n 
:canonical => ' 99005074 ',\n :normalized => '99005074',\n :year_cataloged => 1899,\n :serial => '005074',\n },\n { :orig => '00003373',\n :canonical => ' 00003373 ',\n :normalized => '00003373',\n :year_cataloged => 1900,\n :serial => '003373',\n },\n { :orig => '01001599',\n :canonical => ' 01001599 ',\n :normalized => '01001599',\n :year_cataloged => 1901,\n :serial => '001599',\n },\n { :orig => ' 95156543 ',\n :canonical => ' 95156543 ',\n :normalized => '95156543',\n :year_cataloged => 1995,\n :serial => '156543',\n },\n { :orig => ' 94014580 /AC/r95',\n :canonical => ' 94014580 /AC/r95',\n :normalized => '94014580',\n :year_cataloged => 1994,\n :serial => '014580',\n :suffix_encoded => '/AC',\n :revision_year_encoded => '95',\n :revision_year => 1995,\n },\n { :orig => ' 79310919 //r86',\n :canonical => ' 79310919 //r86',\n :normalized => '79310919',\n :year_cataloged => 1979,\n :serial => '310919',\n :revision_year_encoded => '86',\n :revision_year => 1986,\n },\n { :orig => 'gm 71005810 ',\n :canonical => 'gm 71005810 ',\n :normalized => 'gm71005810',\n :prefix => 'gm',\n :year_cataloged => 1971,\n :serial => '005810',\n },\n { :orig => 'sn2006058112 ',\n :canonical => 'sn2006058112',\n :normalized => 'sn2006058112',\n :prefix => 'sn',\n :year_cataloged => 2006,\n :serial => '058112',\n },\n { :orig => 'gm 71-2450',\n :canonical => 'gm 71002450 ',\n :normalized => 'gm71002450',\n :prefix => 'gm',\n :year_cataloged => 1971,\n :serial => '002450',\n },\n { :orig => '2001-1114',\n :canonical => ' 2001001114',\n :normalized => '2001001114',\n :year_cataloged => 2001,\n :serial => '001114',\n },\n ]\n tests.each do |h|\n it \"normalizes #{h[:orig]}\" do\n StdNum::LCCN.normalize(h[:orig]).must_equal h[:normalized], \"#{h[:orig]} doesn't normalize to #{h[:normalized]}\"\n end\n end\nend\n\n"}}
-{"repo": "newism/nsm.tiny_mce.ee_addon", "pr_number": 16, "title": "Avoid developer log warning in EE 2.6 and up", "state": "closed", "merged_at": "2013-07-17T09:42:45Z", "additions": 1, "deletions": 1, "files_changed": ["ft.nsm_tiny_mce.php"], "files_before": {"ft.nsm_tiny_mce.php": " - Technical Director, Newism\n * @copyright \t\tCopyright (c) 2007-2010 Newism \n * @license \t\tCommercial - please see LICENSE file included with this distribution\n * @link\t\t\thttp://ee-garage.com/nsm-tiny-mce\n * @see\t\t\t\thttp://expressionengine.com/public_beta/docs/development/fieldtypes.html\n */\nclass Nsm_tiny_mce_ft extends EE_Fieldtype\n{\n\t/**\n\t * Field info - Required\n\t *\n\t * @access public\n\t * @var array\n\t */\n\tpublic $info = array(\n\t\t'name'\t\t=> 'NSM TinyMCE',\n\t\t'version'\t=> '1.1.1'\n\t);\n\n\t/**\n\t * The field settings array\n\t *\n\t * @access public\n\t * @var array\n\t */\n\tpublic $settings = array();\n\n\t/**\n\t * Path to the TinyMCE config files. Set in the constructor\n\t *\n\t * @access private\n\t * @var string\n\t */\n\tprivate $tiny_mce_config_path = \"\";\n\n\t/**\n\t * The field type - used for form field prefixes. Must be unique and match the class name. Set in the constructor\n\t *\n\t * @access private\n\t * @var string\n\t */\n\tprivate $field_type = '';\n\n\t/**\n\t * Constructor\n\t *\n\t * @access public\n\t *\n\t * Calls the parent constructor\n\t * Sets the tiny_mce_config_path using the PATH_THRID variable\n\t */\n\tpublic function __construct()\n\t{\n\t\tparent::EE_Fieldtype();\n\n\t\t$this->tiny_mce_config_path = PATH_THIRD_THEMES . 
\"nsm_tiny_mce/scripts/tiny_mce_config/\";\n\n\t\t$this->field_type = $this->addon_id = strtolower(substr(__CLASS__, 0, -3));\n\n\t\tif(!isset($this->EE->session->cache[__CLASS__]))\n\t\t{\n\t\t\t$this->EE->session->cache[__CLASS__]['loaded_configs'] = array();\n\t\t}\n\t}\n\n\t/**\n\t * Replaces the custom field tag\n\t *\n\t * @access public\n\t * @param $data string Contains the field data (or prepped data, if using pre_process)\n\t * @param $params array Contains field parameters (if any)\n\t * @param $tagdata mixed Contains data between tag (for tag pairs) FALSE for single tags\n\t * @return string The HTML replacing the tag\n\t *\n\t */\n\tpublic function replace_tag($data, $params = FALSE, $tagdata = FALSE)\n\t{\n\t\treturn $data;\n\t}\n\n\t/**\n\t * Display the field in the publish form\n\t *\n\t * @access public\n\t * @param $data String Contains the current field data. Blank for new entries.\n\t * @return String The custom field HTML\n\t *\n\t * Includes the TinyMCE base script and the field specific configuration.\n\t * Returns a standard textarea with a configuration specific class\n\t */\n\tpublic function display_field($data, $field_id = false)\n\t{\n\t\t$this->_addConfJs($this->settings[\"conf\"]);\n\n\t\tif(!$field_id)\n\t\t\t$field_id = $this->field_name;\n\n\t\t$this->EE->cp->add_to_foot('');\n\n\t\treturn form_textarea(array(\n\t\t\t'name'\t=> $this->field_name,\n\t\t\t'id'\t=> $field_id,\n\t\t\t'value'\t=> $data,\n\t\t\t'rows' => ' ',\n\t\t\t'style' => \"height: {$this->settings['height']}px\"\n\t\t));\n\t}\n\n\t/**\n\t * Displays the cell\n\t *\n\t * @access public\n\t * @param $data The cell data\n\t */\n\tpublic function display_cell($data)\n\t{\n\t\t$this->_addConfJs($this->settings[\"conf\"]);\n\n\t\tif(!isset($this->EE->session->cache[__CLASS__]['cell_js_loaded']))\n\t\t{\n\t\t\t$theme_url = $this->_getThemeUrl();\n\t\t\t$this->EE->cp->add_to_foot(\"\");\n\t\t\t$this->EE->session->cache[__CLASS__]['cell_js_loaded'] = 
TRUE;\n\t\t}\n\n\t\t$this->EE->cp->add_to_foot('');\n\n\t\treturn '';\n\t}\n\n\t/**\n\t * Displays the Low Variable field\n\t *\n\t * @access public\n\t * @param $var_data The variable data\n\t * @see http://loweblog.com/software/low-variables/docs/fieldtype-bridge/\n\t */\n\tpublic function display_var_field($var_data)\n\t{\n\t\treturn $this->display_field($var_data, \"nsm_tiny_mce_\" . substr($this->field_name, 4, 1));\n\t}\n\n\t/**\n\t * Publish form validation\n\t *\n\t * @param $data array Contains the submitted field data.\n\t * @return mixed TRUE or an error message\n\t */\n\tpublic function validate($data)\n\t{\n\t\treturn TRUE;\n\t}\n\n\t/**\n\t * Default field settings\n\t *\n\t * @access private\n\t * @return The default field settings\n\t */\n\tprivate function _defaultFieldSettings(){\n\t\treturn array(\n\t\t\t\"conf\" => FALSE,\n\t\t\t\"height\" => 300\n\t\t);\n\t}\n\n\t/**\n\t * Save the custom field settings\n\t *\n\t * @param $data array Not sure what this is yet, probably the submitted post data.\n\t * @return boolean Valid or not\n\t */\n\tpublic function save_settings($field_settings)\n\t{\n\t\t$field_settings = array_merge($this->_defaultFieldSettings(), $this->EE->input->post('nsm_tiny_mce'));\n\n\t\t// Force formatting\n\t\t$field_settings['field_fmt'] = 'none';\n\t\t$field_settings['field_show_fmt'] = 'n';\n\t\t$field_settings['field_type'] = 'nsm_tiny_mce';\n\n\t\t// Cleanup\n\t\tunset($_POST['nsm_tiny_mce']);\n\t\tforeach (array_keys($field_settings) as $setting)\n\t\t{\n\t\t\tif (isset($_POST[\"nsm_tiny_mce_{$setting}\"]))\n\t\t\t{\n\t\t\t\tunset($_POST[\"nsm_tiny_mce_{$setting}\"]);\n\t\t\t}\n\t\t}\n\n\t\treturn $field_settings;\n\t}\n\n\t/**\n\t * Process the cell settings before saving\n\t *\n\t * @access public\n\t * @param $col_settings array The settings for the column\n\t * @return array The new settings\n\t */\n\tpublic function save_cell_settings($col_settings)\n\t{\n\t\t$col_settings = $col_settings['nsm_tiny_mce'];\n\t\treturn 
$col_settings;\n\t}\n\n\t/**\n\t * Save the Low variable settings\n\t *\n\t * @access public\n\t * @param $var_settings The variable settings\n\t * @see http://loweblog.com/software/low-variables/docs/fieldtype-bridge/\n\t */\n\tpublic function save_var_settings($var_settings)\n\t{\n\t\treturn $this->EE->input->post('nsm_tiny_mce');\n\t}\n\n\n\n\n\t/**\n\t * Prepares settings array for fields and matrix cells\n\t *\n\t * @access public\n\t * @param $settings array The field / cell settings\n\t * @return array Labels and form inputs\n\t */\n\tprivate function _fieldSettings($settings)\n\t{\n\t\t$r = array();\n\n\t\t// TinyMCE height\n\t\t$r[] = array(\n\t\t\tlang('Height in px', 'nsm_tiny_mce_height'),\n\t\t\tform_input(\"nsm_tiny_mce[height]\", $settings['height'], \"id='nsm_tiny_mce_height' class='matrix-textarea'\")\n\t\t);\n\n\t\t// Configs\n\t\tif($configs = $this->_readTinyMCEConfigs())\n\t\t{\n\t\t\tforeach ($configs as $key => $value)\n\t\t\t{\n\t\t\t\t$options[$key] = ucfirst(str_replace(array(\"_\", \".js\"), array(\"\"), $key));\n\t\t\t}\n\t\t\t$confs = form_dropdown(\"nsm_tiny_mce[conf]\", $options, $settings['conf'], \"id='nsm_tiny_mce_conf'\");\n\t\t}\n\t\telse\n\t\t{\n\t\t\t$confs = \"
\n\t\t\t\t\t\t\tNo configuration files could be found. Check that\n\t\t\t\t\t\t\t\".$this->tiny_mce_config_path.\"\n\t\t\t\t\t\t\tis readable and contains at least one configuration file.\n\t\t\t\t\t\t
\";\n\t\t\t$confs .= form_hidden(\"nsm_tiny_mce[conf]\", '');\n\t\t}\n\n\t\t$r[] = array(\n\t\t\t\t\tlang('Configuration', 'nsm_tiny_mce_conf'),\n\t\t\t\t\t$confs\n\t\t\t\t);\n\n\t\treturn $r;\n\t}\n\n\t/**\n\t * Display the settings form for each custom field\n\t *\n\t * @access public\n\t * @param $data mixed Not sure what this data is yet :S\n\t * @return string Override the field custom settings with custom html\n\t *\n\t * In this case we add an extra row to the table. Not sure how the table is built\n\t */\n\tpublic function display_settings($field_settings)\n\t{\n\t\t$field_settings = array_merge($this->_defaultFieldSettings(), $field_settings);\n\t\t$rows = $this->_fieldSettings($field_settings);\n\n\t\t// add the rows\n\t\tforeach ($rows as $row)\n\t\t{\n\t\t\t$this->EE->table->add_row($row[0], $row[1]);\n\t\t}\n\t}\n\n\t/**\n\t * Display Cell Settings\n\t *\n\t * @access public\n\t * @param $cell_settings array The cell settings\n\t * @return array Label and form inputs\n\t */\n\tpublic function display_cell_settings($cell_settings)\n\t{\n\t\t$cell_settings = array_merge($this->_defaultFieldSettings(), $cell_settings);\n\t\treturn $this->_fieldSettings($cell_settings);\n\t}\n\n\t/**\n\t * Display Variable Settings\n\t *\n\t * @access public\n\t * @param $var_settings array The variable settings\n\t * @return array Label and form inputs\n\t */\n\tpublic function display_var_settings($var_settings)\n\t{\n\t\t$var_settings = array_merge($this->_defaultFieldSettings(), $var_settings);\n\t\treturn $this->_fieldSettings($var_settings);\n\t}\n\n\t/**\n\t * Adds the TinyMCE configuration to the CP\n\t */\n\tprivate function _addConfJs($conf, $cell = FALSE)\n\t{\n\n\t\t$theme_url = 
$this->_getThemeUrl();\n\n\t\tif(!isset($this->EE->session->cache[__CLASS__]['tiny_mce_loaded']))\n\t\t{\n\t\t\t$this->EE->cp->add_to_head(\"\");\n\n\t\t\t$this->EE->cp->add_to_foot(\"\");\n\t\t\t$this->EE->cp->add_to_foot('');\n\t\t\t$this->EE->session->cache[__CLASS__]['tiny_mce_loaded'] = TRUE;\n\t\t}\n\n\t\tif(!in_array($conf, $this->EE->session->cache[__CLASS__]['loaded_configs']))\n\t\t{\n\t\t\t$this->EE->session->cache[__CLASS__]['loaded_configs'][] = $conf;\n\t\t\t$this->EE->cp->add_to_foot(\"\");\n\n\t\t}\n\t}\n\n\t/**\n\t * Get the current themes URL from the theme folder + / + the addon id\n\t *\n\t * @access private\n\t * @return string The theme URL\n\t */\n\tprivate function _getThemeUrl()\n\t{\n\t\t$EE =& get_instance();\n\t\tif(!isset($EE->session->cache[$this->addon_id]['theme_url']))\n\t\t{\n\t\t\t$theme_url = URL_THIRD_THEMES;\n\t\t\tif (substr($theme_url, -1) != '/') $theme_url .= '/';\n\t\t\t$theme_url .= $this->addon_id;\n\t\t\t$EE->session->cache[$this->addon_id]['theme_url'] = $theme_url;\n\t\t}\n\t\treturn $EE->session->cache[$this->addon_id]['theme_url'];\n\t}\n\n\t/**\n\t * Reads the custom TinyMCE configs from the directory\n\t *\n\t * @access private\n\t * @return mixed FALSE if no configs are found or an array of filenames\n\t */\n\tprivate function _readTinyMCEConfigs()\n\t{\n\t\t// have the configs been processed?\n\t\tif(isset($this->EE->session->cache[__CLASS__]['tiny_mce_configs']) === FALSE)\n\t\t{\n\t\t\t// assume there are no configs\n\t\t\t$configs = FALSE;\n\t\t\t// if the provided string an actual directory\n\t\t\tif(is_dir($dir = $this->tiny_mce_config_path))\n\t\t\t{\n\t\t\t\t// open the directory and assign it to a handle\n\t\t\t\t$dir_handle = opendir($dir);\n\t\t\t\t// if there is a dir handle\n\t\t\t\tif($dir_handle)\n\t\t\t\t{\n\t\t\t\t\t/* This is the correct way to loop over the directory. 
*/\n\t\t \t\t// loop over the files\n\t\t\t\t\twhile (false !== ($file = readdir($dir_handle)))\n\t\t\t\t\t{\n\t\t\t\t\t\t// if this is a real file\n\t\t\t\t\t\tif ($file != \".\" && $file != \"..\" && $file != \"Thumb.db\" && substr($file, 0, 1) != '-' && substr($file, -3) == \".js\")\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\t// add the config to the list\n\t\t\t\t\t\t\t$configs[$file] = file_get_contents($dir.$file);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\t// assign the configs to a session var\n\t\t\t$this->EE->session->cache[__CLASS__]['tiny_mce_configs'] = $configs;\n\t\t}\n\n\t\t// return the session var\n\t\treturn $this->EE->session->cache[__CLASS__]['tiny_mce_configs'];\n\t}\n\n}\n//END CLASS"}, "files_after": {"ft.nsm_tiny_mce.php": " - Technical Director, Newism\n * @copyright \t\tCopyright (c) 2007-2010 Newism \n * @license \t\tCommercial - please see LICENSE file included with this distribution\n * @link\t\t\thttp://ee-garage.com/nsm-tiny-mce\n * @see\t\t\t\thttp://expressionengine.com/public_beta/docs/development/fieldtypes.html\n */\nclass Nsm_tiny_mce_ft extends EE_Fieldtype\n{\n\t/**\n\t * Field info - Required\n\t *\n\t * @access public\n\t * @var array\n\t */\n\tpublic $info = array(\n\t\t'name'\t\t=> 'NSM TinyMCE',\n\t\t'version'\t=> '1.1.1'\n\t);\n\n\t/**\n\t * The field settings array\n\t *\n\t * @access public\n\t * @var array\n\t */\n\tpublic $settings = array();\n\n\t/**\n\t * Path to the TinyMCE config files. Set in the constructor\n\t *\n\t * @access private\n\t * @var string\n\t */\n\tprivate $tiny_mce_config_path = \"\";\n\n\t/**\n\t * The field type - used for form field prefixes. Must be unique and match the class name. 
Set in the constructor\n\t *\n\t * @access private\n\t * @var string\n\t */\n\tprivate $field_type = '';\n\n\t/**\n\t * Constructor\n\t *\n\t * @access public\n\t *\n\t * Calls the parent constructor\n\t * Sets the tiny_mce_config_path using the PATH_THRID variable\n\t */\n\tpublic function __construct()\n\t{\n\t\tparent::__construct();\n\n\t\t$this->tiny_mce_config_path = PATH_THIRD_THEMES . \"nsm_tiny_mce/scripts/tiny_mce_config/\";\n\n\t\t$this->field_type = $this->addon_id = strtolower(substr(__CLASS__, 0, -3));\n\n\t\tif(!isset($this->EE->session->cache[__CLASS__]))\n\t\t{\n\t\t\t$this->EE->session->cache[__CLASS__]['loaded_configs'] = array();\n\t\t}\n\t}\n\n\t/**\n\t * Replaces the custom field tag\n\t *\n\t * @access public\n\t * @param $data string Contains the field data (or prepped data, if using pre_process)\n\t * @param $params array Contains field parameters (if any)\n\t * @param $tagdata mixed Contains data between tag (for tag pairs) FALSE for single tags\n\t * @return string The HTML replacing the tag\n\t *\n\t */\n\tpublic function replace_tag($data, $params = FALSE, $tagdata = FALSE)\n\t{\n\t\treturn $data;\n\t}\n\n\t/**\n\t * Display the field in the publish form\n\t *\n\t * @access public\n\t * @param $data String Contains the current field data. 
Blank for new entries.\n\t * @return String The custom field HTML\n\t *\n\t * Includes the TinyMCE base script and the field specific configuration.\n\t * Returns a standard textarea with a configuration specific class\n\t */\n\tpublic function display_field($data, $field_id = false)\n\t{\n\t\t$this->_addConfJs($this->settings[\"conf\"]);\n\n\t\tif(!$field_id)\n\t\t\t$field_id = $this->field_name;\n\n\t\t$this->EE->cp->add_to_foot('');\n\n\t\treturn form_textarea(array(\n\t\t\t'name'\t=> $this->field_name,\n\t\t\t'id'\t=> $field_id,\n\t\t\t'value'\t=> $data,\n\t\t\t'rows' => ' ',\n\t\t\t'style' => \"height: {$this->settings['height']}px\"\n\t\t));\n\t}\n\n\t/**\n\t * Displays the cell\n\t *\n\t * @access public\n\t * @param $data The cell data\n\t */\n\tpublic function display_cell($data)\n\t{\n\t\t$this->_addConfJs($this->settings[\"conf\"]);\n\n\t\tif(!isset($this->EE->session->cache[__CLASS__]['cell_js_loaded']))\n\t\t{\n\t\t\t$theme_url = $this->_getThemeUrl();\n\t\t\t$this->EE->cp->add_to_foot(\"\");\n\t\t\t$this->EE->session->cache[__CLASS__]['cell_js_loaded'] = TRUE;\n\t\t}\n\n\t\t$this->EE->cp->add_to_foot('');\n\n\t\treturn '';\n\t}\n\n\t/**\n\t * Displays the Low Variable field\n\t *\n\t * @access public\n\t * @param $var_data The variable data\n\t * @see http://loweblog.com/software/low-variables/docs/fieldtype-bridge/\n\t */\n\tpublic function display_var_field($var_data)\n\t{\n\t\treturn $this->display_field($var_data, \"nsm_tiny_mce_\" . 
substr($this->field_name, 4, 1));\n\t}\n\n\t/**\n\t * Publish form validation\n\t *\n\t * @param $data array Contains the submitted field data.\n\t * @return mixed TRUE or an error message\n\t */\n\tpublic function validate($data)\n\t{\n\t\treturn TRUE;\n\t}\n\n\t/**\n\t * Default field settings\n\t *\n\t * @access private\n\t * @return The default field settings\n\t */\n\tprivate function _defaultFieldSettings(){\n\t\treturn array(\n\t\t\t\"conf\" => FALSE,\n\t\t\t\"height\" => 300\n\t\t);\n\t}\n\n\t/**\n\t * Save the custom field settings\n\t *\n\t * @param $data array Not sure what this is yet, probably the submitted post data.\n\t * @return boolean Valid or not\n\t */\n\tpublic function save_settings($field_settings)\n\t{\n\t\t$field_settings = array_merge($this->_defaultFieldSettings(), $this->EE->input->post('nsm_tiny_mce'));\n\n\t\t// Force formatting\n\t\t$field_settings['field_fmt'] = 'none';\n\t\t$field_settings['field_show_fmt'] = 'n';\n\t\t$field_settings['field_type'] = 'nsm_tiny_mce';\n\n\t\t// Cleanup\n\t\tunset($_POST['nsm_tiny_mce']);\n\t\tforeach (array_keys($field_settings) as $setting)\n\t\t{\n\t\t\tif (isset($_POST[\"nsm_tiny_mce_{$setting}\"]))\n\t\t\t{\n\t\t\t\tunset($_POST[\"nsm_tiny_mce_{$setting}\"]);\n\t\t\t}\n\t\t}\n\n\t\treturn $field_settings;\n\t}\n\n\t/**\n\t * Process the cell settings before saving\n\t *\n\t * @access public\n\t * @param $col_settings array The settings for the column\n\t * @return array The new settings\n\t */\n\tpublic function save_cell_settings($col_settings)\n\t{\n\t\t$col_settings = $col_settings['nsm_tiny_mce'];\n\t\treturn $col_settings;\n\t}\n\n\t/**\n\t * Save the Low variable settings\n\t *\n\t * @access public\n\t * @param $var_settings The variable settings\n\t * @see http://loweblog.com/software/low-variables/docs/fieldtype-bridge/\n\t */\n\tpublic function save_var_settings($var_settings)\n\t{\n\t\treturn $this->EE->input->post('nsm_tiny_mce');\n\t}\n\n\n\n\n\t/**\n\t * Prepares settings array for 
fields and matrix cells\n\t *\n\t * @access public\n\t * @param $settings array The field / cell settings\n\t * @return array Labels and form inputs\n\t */\n\tprivate function _fieldSettings($settings)\n\t{\n\t\t$r = array();\n\n\t\t// TinyMCE height\n\t\t$r[] = array(\n\t\t\tlang('Height in px', 'nsm_tiny_mce_height'),\n\t\t\tform_input(\"nsm_tiny_mce[height]\", $settings['height'], \"id='nsm_tiny_mce_height' class='matrix-textarea'\")\n\t\t);\n\n\t\t// Configs\n\t\tif($configs = $this->_readTinyMCEConfigs())\n\t\t{\n\t\t\tforeach ($configs as $key => $value)\n\t\t\t{\n\t\t\t\t$options[$key] = ucfirst(str_replace(array(\"_\", \".js\"), array(\"\"), $key));\n\t\t\t}\n\t\t\t$confs = form_dropdown(\"nsm_tiny_mce[conf]\", $options, $settings['conf'], \"id='nsm_tiny_mce_conf'\");\n\t\t}\n\t\telse\n\t\t{\n\t\t\t$confs = \"
\n\t\t\t\t\t\t\tNo configuration files could be found. Check that\n\t\t\t\t\t\t\t\".$this->tiny_mce_config_path.\"\n\t\t\t\t\t\t\tis readable and contains at least one configuration file.\n\t\t\t\t\t\t
\";\n\t\t\t$confs .= form_hidden(\"nsm_tiny_mce[conf]\", '');\n\t\t}\n\n\t\t$r[] = array(\n\t\t\t\t\tlang('Configuration', 'nsm_tiny_mce_conf'),\n\t\t\t\t\t$confs\n\t\t\t\t);\n\n\t\treturn $r;\n\t}\n\n\t/**\n\t * Display the settings form for each custom field\n\t *\n\t * @access public\n\t * @param $data mixed Not sure what this data is yet :S\n\t * @return string Override the field custom settings with custom html\n\t *\n\t * In this case we add an extra row to the table. Not sure how the table is built\n\t */\n\tpublic function display_settings($field_settings)\n\t{\n\t\t$field_settings = array_merge($this->_defaultFieldSettings(), $field_settings);\n\t\t$rows = $this->_fieldSettings($field_settings);\n\n\t\t// add the rows\n\t\tforeach ($rows as $row)\n\t\t{\n\t\t\t$this->EE->table->add_row($row[0], $row[1]);\n\t\t}\n\t}\n\n\t/**\n\t * Display Cell Settings\n\t *\n\t * @access public\n\t * @param $cell_settings array The cell settings\n\t * @return array Label and form inputs\n\t */\n\tpublic function display_cell_settings($cell_settings)\n\t{\n\t\t$cell_settings = array_merge($this->_defaultFieldSettings(), $cell_settings);\n\t\treturn $this->_fieldSettings($cell_settings);\n\t}\n\n\t/**\n\t * Display Variable Settings\n\t *\n\t * @access public\n\t * @param $var_settings array The variable settings\n\t * @return array Label and form inputs\n\t */\n\tpublic function display_var_settings($var_settings)\n\t{\n\t\t$var_settings = array_merge($this->_defaultFieldSettings(), $var_settings);\n\t\treturn $this->_fieldSettings($var_settings);\n\t}\n\n\t/**\n\t * Adds the TinyMCE configuration to the CP\n\t */\n\tprivate function _addConfJs($conf, $cell = FALSE)\n\t{\n\n\t\t$theme_url = 
$this->_getThemeUrl();\n\n\t\tif(!isset($this->EE->session->cache[__CLASS__]['tiny_mce_loaded']))\n\t\t{\n\t\t\t$this->EE->cp->add_to_head(\"\");\n\n\t\t\t$this->EE->cp->add_to_foot(\"\");\n\t\t\t$this->EE->cp->add_to_foot('');\n\t\t\t$this->EE->session->cache[__CLASS__]['tiny_mce_loaded'] = TRUE;\n\t\t}\n\n\t\tif(!in_array($conf, $this->EE->session->cache[__CLASS__]['loaded_configs']))\n\t\t{\n\t\t\t$this->EE->session->cache[__CLASS__]['loaded_configs'][] = $conf;\n\t\t\t$this->EE->cp->add_to_foot(\"\");\n\n\t\t}\n\t}\n\n\t/**\n\t * Get the current themes URL from the theme folder + / + the addon id\n\t *\n\t * @access private\n\t * @return string The theme URL\n\t */\n\tprivate function _getThemeUrl()\n\t{\n\t\t$EE =& get_instance();\n\t\tif(!isset($EE->session->cache[$this->addon_id]['theme_url']))\n\t\t{\n\t\t\t$theme_url = URL_THIRD_THEMES;\n\t\t\tif (substr($theme_url, -1) != '/') $theme_url .= '/';\n\t\t\t$theme_url .= $this->addon_id;\n\t\t\t$EE->session->cache[$this->addon_id]['theme_url'] = $theme_url;\n\t\t}\n\t\treturn $EE->session->cache[$this->addon_id]['theme_url'];\n\t}\n\n\t/**\n\t * Reads the custom TinyMCE configs from the directory\n\t *\n\t * @access private\n\t * @return mixed FALSE if no configs are found or an array of filenames\n\t */\n\tprivate function _readTinyMCEConfigs()\n\t{\n\t\t// have the configs been processed?\n\t\tif(isset($this->EE->session->cache[__CLASS__]['tiny_mce_configs']) === FALSE)\n\t\t{\n\t\t\t// assume there are no configs\n\t\t\t$configs = FALSE;\n\t\t\t// if the provided string an actual directory\n\t\t\tif(is_dir($dir = $this->tiny_mce_config_path))\n\t\t\t{\n\t\t\t\t// open the directory and assign it to a handle\n\t\t\t\t$dir_handle = opendir($dir);\n\t\t\t\t// if there is a dir handle\n\t\t\t\tif($dir_handle)\n\t\t\t\t{\n\t\t\t\t\t/* This is the correct way to loop over the directory. 
*/\n\t\t \t\t// loop over the files\n\t\t\t\t\twhile (false !== ($file = readdir($dir_handle)))\n\t\t\t\t\t{\n\t\t\t\t\t\t// if this is a real file\n\t\t\t\t\t\tif ($file != \".\" && $file != \"..\" && $file != \"Thumb.db\" && substr($file, 0, 1) != '-' && substr($file, -3) == \".js\")\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\t// add the config to the list\n\t\t\t\t\t\t\t$configs[$file] = file_get_contents($dir.$file);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\t// assign the configs to a session var\n\t\t\t$this->EE->session->cache[__CLASS__]['tiny_mce_configs'] = $configs;\n\t\t}\n\n\t\t// return the session var\n\t\treturn $this->EE->session->cache[__CLASS__]['tiny_mce_configs'];\n\t}\n\n}\n//END CLASS"}}
-{"repo": "fabriceb/sfFacebookConnectPlugin", "pr_number": 7, "title": "Doc + facebook object instanciation", "state": "closed", "merged_at": "2010-11-24T13:51:29Z", "additions": 47, "deletions": 7, "files_changed": ["lib/sfFacebook.class.php"], "files_before": {"lib/sfFacebook.class.php": ": new Facebook php-sdk\n */\n\n\n public static function getFacebookCookie()\n {\n $app_id = self::getApiKey();\n $application_secret = self::getApiSecret();\n $args = array();\n if (!isset($_COOKIE['fbs_' . $app_id]))\n {\n return null;\n }\n parse_str(trim($_COOKIE['fbs_' . $app_id], '\\\\\"'), $args);\n ksort($args);\n $payload = '';\n foreach ($args as $key => $value)\n {\n if ($key != 'sig')\n {\n $payload .= $key . '=' . $value;\n }\n }\n if (md5($payload . $application_secret) != $args['sig'])\n {\n return null;\n }\n return $args;\n }\n\n\n public static function getFacebookClient()\n {\n if (self::$client === null)\n {\n $params = array(\n 'appId' => self::getApiKey(),\n 'secret' => self::getApiSecret(),\n 'cookie' => self::getApiCookie(),\n 'domain' => self::getApiDomain()\n );\n\n self::$client = new Facebook($params);\n }\n\n if (!self::$client)\n {\n error_log('Could not create facebook client.');\n }\n\n return self::$client;\n }\n\n /**\n * get the facebook session\n *\n * @return Array\n * @author Benjamin Grandfond \n * @since 2010-05-13\n */\n public static function getSession()\n {\n\n return self::getFacebookClient()->getSession();\n }\n\n /**\n * get the facebook user\n *\n * @return Array\n * @author Benjamin Grandfond \n * @since 2010-05-13\n */\n public static function getUser()\n {\n\n return self::getFacebookClient()->getUser();\n }\n\n /**\n * get datas through the faceook graph api\n * @see http://developers.facebook.com/docs/api\n *\n * @return Array\n * @author Benjamin Grandfond \n * @since 2010-05-13\n */\n public static function getFacebookApi($param)\n {\n\n return self::getFacebookClient()->api($param);\n }\n\n /**\n * gets the facebook api 
key\n *\n * @return Facebook\n * @author fabriceb\n * @since 2009-05-17\n */\n public static function getApiKey()\n {\n\n return sfConfig::get('app_facebook_api_key');\n }\n\n /**\n * gets the facebook api secret\n *\n * @return Facebook\n * @author fabriceb\n * @since 2009-05-17\n */\n public static function getApiSecret()\n {\n\n return sfConfig::get('app_facebook_api_secret');\n }\n\n /**\n * get the facebook app cookie support\n *\n * @return Boolean\n * @author Benjamin Grandfond \n * @since 2010-05-12\n */\n public static function getApiCookie()\n {\n\n return sfConfig::get('app_facebook_api_cookie', true);\n }\n\n /**\n * get the facebook app domain\n *\n * @return Boolean\n * @author Benjamin Grandfond \n * @since 2010-05-12\n */\n public static function getApiDomain()\n {\n\n return sfConfig::get('app_facebook_api_domain', null);\n }\n\n /**\n * gets or create user with facebook uid inprofile\n *\n * @param Integer $facebook_uid\n * @param boolean $isActive\n * @return sfGuardUser $sfGuardUser\n */\n public static function getOrCreateUserByFacebookUid($facebook_uid, $isActive = true)\n {\n $sfGuardUser = self::getGuardAdapter()->getSfGuardUserByFacebookUid($facebook_uid, $isActive);\n\n if (!$sfGuardUser instanceof sfGuardUser)\n {\n if (sfConfig::get('sf_logging_enabled'))\n {\n sfContext::getInstance()->getLogger()->info('{sfFacebookConnect} No user exists with current email hash');\n }\n $sfGuardUser = self::getGuardAdapter()->createSfGuardUserWithFacebookUid($facebook_uid);\n }\n\n return $sfGuardUser;\n }\n\n /**\n * gets user with facebook uid inprofile\n *\n * @param Integer $facebook_uid\n * @param boolean $isActive\n * @return sfGuardUser $sfGuardUser\n */\n public static function getUserByFacebookUid($facebook_uid, $isActive = true)\n {\n $sfGuardUser = self::getGuardAdapter()->retrieveSfGuardUserByFacebookUid($facebook_uid, $isActive);\n\n if (!$sfGuardUser instanceof sfGuardUser)\n {\n if (sfConfig::get('sf_logging_enabled'))\n {\n 
sfContext::getInstance()->getLogger()->info('{sfFacebookConnect} No user exists with current email hash');\n }\n }\n\n return $sfGuardUser;\n }\n\n /**\n * Gets the currently logged sfGuardUser using Facebook Session\n *\n * @param boolean $create whether to automatically create a sfGuardUser\n * if none found corresponding to the Facebook session\n * @param boolean $isActive\n * @return sfGuardUser\n * @author fabriceb\n * @since 2009-05-17\n * @since 2009-08-25\n */\n public static function getSfGuardUserByFacebookSession($create = true, $isActive = true)\n {\n // We get the facebook uid from session\n $fb_uid = self::getAnyFacebookUid();\n if ($fb_uid)\n {\n\n if ($create)\n {\n\n return self::getOrCreateUserByFacebookUid($fb_uid, $isActive);\n }\n else\n {\n\n return self::getUserByFacebookUid($fb_uid, $isActive);\n }\n }\n\n if (sfConfig::get('sf_logging_enabled'))\n {\n sfContext::getInstance()->getLogger()->info('{sfFacebookConnect} No current Facebook session');\n }\n\n return null;\n }\n\n /**\n * checks the existence of the HTTP_X_FB_USER_REMOTE_ADDR porperty in the header\n * which is a sign of being included by the fbml interface\n *\n * @return boolean\n * @author fabriceb\n * @since Jun 8, 2009 fabriceb\n */\n public static function isInsideFacebook()\n {\n\n return isset($_SERVER['HTTP_X_FB_USER_REMOTE_ADDR']);\n }\n\n /**\n *\n * @return boolean\n * @author fabriceb\n * @since Jun 8, 2009 fabriceb\n */\n public static function inCanvas()\n {\n\n return self::getFacebookClient()->in_fb_canvas();\n }\n\n /**\n * redirects to the login page of the Facebook application if not logged yet\n *\n * @author fabriceb\n * @since Jun 8, 2009 fabriceb\n */\n public static function requireLogin()\n {\n self::getFacebookClient()->require_login();\n }\n\n /**\n * redirects depnding on in canvas or not\n *\n * @param $url\n * @param $statusCode\n * @return mixed sfView::NONE or sfStopException\n * @author fabriceb\n * @since Jun 8, 2009 fabriceb\n */\n public static 
function redirect($url, $statusCode = 302)\n {\n\n $context = sfContext::getInstance();\n $response = $context->getResponse();\n\n if (self::inCanvas())\n {\n $url = sfConfig::get('app_facebook_app_url').$context->getController()->genUrl($url, false);\n\n $text = '';\n $response->setContent(sfContext::getInstance()->getResponse()->getContent().$text);\n\n return sfView::NONE;\n }\n else\n {\n $fb_parameters = '?'.sfFacebook::getFacebookSigParameters(sfContext::getInstance()->getRequest());\n $url = $context->getController()->genUrl($url, true).$fb_parameters;\n\n $response->clearHttpHeaders();\n $response->setStatusCode($statusCode);\n $response->setHttpHeader('Location', $url);\n $response->setContent(sprintf('', 0, htmlspecialchars($url, ENT_QUOTES, sfConfig::get('sf_charset'))));\n $response->send();\n\n throw new sfStopException();\n }\n }\n\n /**\n *\n * @param integer $user_uid\n * @return integer[]\n * @author fabriceb\n * @since Jun 9, 2009 fabriceb\n */\n public static function getFacebookFriendsUids($user_uid = null)\n {\n\n try\n {\n $friends_uids = self::getFacebookApi()->friends_get(null, $user_uid);\n }\n catch(FacebookRestClientException $e)\n {\n $friends_uids = array();\n if (sfConfig::get('sf_logging_enabled'))\n {\n sfContext::getInstance()->getLogger()->info('{FacebookRestClientException} '.$e->getMessage());\n }\n }\n\n return $friends_uids;\n }\n\n /**\n *\n * @return sfFacebookGuardAdapter\n * @author fabriceb\n * @since Aug 10, 2009\n * @since 2009-10-08 Alban Creton : added configurability of the Guard Adapter.\n */\n public static function getGuardAdapter()\n {\n if (self::$guard_adapter === null)\n {\n if(sfConfig::get('app_facebook_guard_adapter') && class_exists(sfConfig::get('app_facebook_guard_adapter'), true))\n {\n $class = sfConfig::get('app_facebook_guard_adapter');\n }\n else if (class_exists('sfGuardUserPeer', true))\n {\n $class = 'sfFacebookPropelGuardAdapter';\n }\n else\n {\n $class = 'sfFacebookDoctrineGuardAdapter';\n }\n 
self::$guard_adapter = new $class();\n }\n if (!self::$guard_adapter)\n {\n error_log('Could not create guard adapter.');\n }\n\n return self::$guard_adapter;\n }\n\n\n /**\n *\n * @return boolean\n * @author fabriceb\n * @since Aug 27, 2009\n */\n public static function isJsLoaded()\n {\n\n return self::$is_js_loaded;\n }\n\n /**\n *\n * @return void\n * @author fabriceb\n * @since Aug 27, 2009\n */\n public static function setJsLoaded()\n {\n self::$is_js_loaded = true;\n }\n\n /**\n * Dirty way to convert fr into fr_FR\n * @param string $culture\n * @return string\n * @author fabriceb\n * @since Aug 28, 2009\n */\n public static function getLocale($culture = null)\n {\n if (is_null($culture))\n {\n $culture = sfContext::getInstance()->getUser()->getCulture();\n }\n\n $culture_to_locale = array(\n 'fr' => 'fr_FR',\n 'en' => 'en_US',\n 'de' => 'de_DE',\n 'it' => 'it_IT',\n );\n\n return array_key_exists($culture, $culture_to_locale) ? $culture_to_locale[$culture] : $culture;\n }\n\n /**\n * @return interger $facebook_uid\n * @author fabriceb\n * @since Oct 6, 2009\n */\n public static function getAnyFacebookUid()\n {\n\n $cookie = self::getFacebookCookie();\n $fb_uid = $cookie['uid'];\n sfContext::getInstance()->getLogger()->info('{sfFacebookConnect} Fb_uid from cookie : '.$fb_uid);\n\n return $fb_uid;\n }\n\n /**\n *\n * @param sfWebRequest $request\n * @return string\n * @author fabriceb\n * @since Oct 12, 2009\n */\n public static function getFacebookSigParameters(sfWebRequest $request)\n {\n $parameters = $request->getParameterHolder()->getAll();\n\n $parameter_string = '';\n foreach ($parameters as $key => $parameter)\n {\n if (substr($key,0,3)=='fb_')\n {\n $parameter_string .= '&'.$key.'='.$parameter;\n }\n }\n\n return $parameter_string;\n }\n\n}\n"}, "files_after": {"lib/sfFacebook.class.php": " $value)\n {\n if ($key != 'sig')\n {\n $payload .= $key . '=' . $value;\n }\n }\n if (md5($payload . 
$application_secret) != $args['sig'])\n {\n return null;\n }\n\n return $args;\n }\n\n /**\n * gets the facebook client instance\n *\n * @return Facebook\n * @author fabriceb\n * @since 2009-05-17\n * @since 2010-05-12 Benjamin Grandfond : new Facebook php-sdk\n * @since 2010-09-03 Benjamin Grandfond : correct the parameters sent to the facebook class constructor\n */\n\n\n public static function getFacebookCookie()\n {\n $app_id = self::getApiKey();\n $application_secret = self::getApiSecret();\n $args = array();\n if (!isset($_COOKIE['fbs_' . $app_id]))\n {\n return null;\n }\n parse_str(trim($_COOKIE['fbs_' . $app_id], '\\\\\"'), $args);\n ksort($args);\n $payload = '';\n foreach ($args as $key => $value)\n {\n if ($key != 'sig')\n {\n $payload .= $key . '=' . $value;\n }\n }\n if (md5($payload . $application_secret) != $args['sig'])\n {\n return null;\n }\n return $args;\n }\n\n\n public static function getFacebookClient()\n {\n if (self::$client === null)\n {\n $params = array(\n 'appId' => self::getApiId(),\n 'secret' => self::getApiSecret(),\n 'cookie' => self::getApiCookie(),\n 'domain' => self::getApiDomain()\n );\n\n self::$client = new Facebook($params);\n }\n\n if (!self::$client)\n {\n error_log('Could not create facebook client.');\n }\n\n return self::$client;\n }\n\n /**\n * get the facebook session\n *\n * @return Array\n * @author Benjamin Grandfond \n * @since 2010-05-13\n */\n public static function getSession()\n {\n\n return self::getFacebookClient()->getSession();\n }\n\n /**\n * get the facebook user\n *\n * @return Array\n * @author Benjamin Grandfond \n * @since 2010-05-13\n */\n public static function getUser()\n {\n\n return self::getFacebookClient()->getUser();\n }\n\n /**\n * get datas through the faceook graph api\n * @see http://developers.facebook.com/docs/api\n *\n * @return Array\n * @author Benjamin Grandfond \n * @since 2010-05-13\n */\n public static function getFacebookApi($param)\n {\n\n return 
self::getFacebookClient()->api($param);\n }\n\n /**\n * gets the facebook api key\n *\n * @return Facebook\n * @author fabriceb\n * @since 2009-05-17\n */\n public static function getApiKey()\n {\n\n return sfConfig::get('app_facebook_api_key');\n }\n\n /**\n * gets the facebook api secret\n *\n * @return Facebook\n * @author fabriceb\n * @since 2009-05-17\n */\n public static function getApiSecret()\n {\n\n return sfConfig::get('app_facebook_api_secret');\n }\n\n /**\n * get the facebook app id\n *\n * @return integer\n * @author Benjamin Grandfond \n * @since 2010-09-03\n */\n public static function getApiId()\n {\n\n return sfConfig::get('app_facebook_api_id');\n }\n\n /**\n * get the facebook app cookie support\n *\n * @return Boolean\n * @author Benjamin Grandfond \n * @since 2010-05-12\n */\n public static function getApiCookie()\n {\n\n return sfConfig::get('app_facebook_api_cookie', true);\n }\n\n /**\n * get the facebook app domain\n *\n * @return Boolean\n * @author Benjamin Grandfond \n * @since 2010-05-12\n */\n public static function getApiDomain()\n {\n\n return sfConfig::get('app_facebook_api_domain', null);\n }\n\n /**\n * gets or create user with facebook uid inprofile\n *\n * @param Integer $facebook_uid\n * @param boolean $isActive\n * @return sfGuardUser $sfGuardUser\n */\n public static function getOrCreateUserByFacebookUid($facebook_uid, $isActive = true)\n {\n $sfGuardUser = self::getGuardAdapter()->getSfGuardUserByFacebookUid($facebook_uid, $isActive);\n\n if (!$sfGuardUser instanceof sfGuardUser)\n {\n if (sfConfig::get('sf_logging_enabled'))\n {\n sfContext::getInstance()->getLogger()->info('{sfFacebookConnect} No user exists with current email hash');\n }\n $sfGuardUser = self::getGuardAdapter()->createSfGuardUserWithFacebookUid($facebook_uid);\n }\n\n return $sfGuardUser;\n }\n\n /**\n * gets user with facebook uid inprofile\n *\n * @param Integer $facebook_uid\n * @param boolean $isActive\n * @return sfGuardUser $sfGuardUser\n */\n 
public static function getUserByFacebookUid($facebook_uid, $isActive = true)\n {\n $sfGuardUser = self::getGuardAdapter()->retrieveSfGuardUserByFacebookUid($facebook_uid, $isActive);\n\n if (!$sfGuardUser instanceof sfGuardUser)\n {\n if (sfConfig::get('sf_logging_enabled'))\n {\n sfContext::getInstance()->getLogger()->info('{sfFacebookConnect} No user exists with current email hash');\n }\n }\n\n return $sfGuardUser;\n }\n\n /**\n * Gets the currently logged sfGuardUser using Facebook Session\n *\n * @param boolean $create whether to automatically create a sfGuardUser\n * if none found corresponding to the Facebook session\n * @param boolean $isActive\n * @return sfGuardUser\n * @author fabriceb\n * @since 2009-05-17\n * @since 2009-08-25\n */\n public static function getSfGuardUserByFacebookSession($create = true, $isActive = true)\n {\n // We get the facebook uid from session\n $fb_uid = self::getAnyFacebookUid();\n if ($fb_uid)\n {\n\n if ($create)\n {\n\n return self::getOrCreateUserByFacebookUid($fb_uid, $isActive);\n }\n else\n {\n\n return self::getUserByFacebookUid($fb_uid, $isActive);\n }\n }\n\n if (sfConfig::get('sf_logging_enabled'))\n {\n sfContext::getInstance()->getLogger()->info('{sfFacebookConnect} No current Facebook session');\n }\n\n return null;\n }\n\n /**\n * checks the existence of the HTTP_X_FB_USER_REMOTE_ADDR porperty in the header\n * which is a sign of being included by the fbml interface\n *\n * @return boolean\n * @author fabriceb\n * @since Jun 8, 2009 fabriceb\n */\n public static function isInsideFacebook()\n {\n\n return isset($_SERVER['HTTP_X_FB_USER_REMOTE_ADDR']);\n }\n\n /**\n *\n * @return boolean\n * @author fabriceb\n * @since Jun 8, 2009 fabriceb\n */\n public static function inCanvas()\n {\n\n return self::getFacebookClient()->in_fb_canvas();\n }\n\n /**\n * redirects to the login page of the Facebook application if not logged yet\n *\n * @author fabriceb\n * @since Jun 8, 2009 fabriceb\n */\n public static function 
requireLogin()\n {\n self::getFacebookClient()->require_login();\n }\n\n /**\n * redirects depnding on in canvas or not\n *\n * @param $url\n * @param $statusCode\n * @return mixed sfView::NONE or sfStopException\n * @author fabriceb\n * @since Jun 8, 2009 fabriceb\n */\n public static function redirect($url, $statusCode = 302)\n {\n\n $context = sfContext::getInstance();\n $response = $context->getResponse();\n\n if (self::inCanvas())\n {\n $url = sfConfig::get('app_facebook_app_url').$context->getController()->genUrl($url, false);\n\n $text = '';\n $response->setContent(sfContext::getInstance()->getResponse()->getContent().$text);\n\n return sfView::NONE;\n }\n else\n {\n $fb_parameters = '?'.sfFacebook::getFacebookSigParameters(sfContext::getInstance()->getRequest());\n $url = $context->getController()->genUrl($url, true).$fb_parameters;\n\n $response->clearHttpHeaders();\n $response->setStatusCode($statusCode);\n $response->setHttpHeader('Location', $url);\n $response->setContent(sprintf('', 0, htmlspecialchars($url, ENT_QUOTES, sfConfig::get('sf_charset'))));\n $response->send();\n\n throw new sfStopException();\n }\n }\n\n /**\n *\n * @param integer $user_uid\n * @return integer[]\n * @author fabriceb\n * @since Jun 9, 2009 fabriceb\n */\n public static function getFacebookFriendsUids($user_uid = null)\n {\n\n try\n {\n $friends_uids = self::getFacebookApi()->friends_get(null, $user_uid);\n }\n catch(FacebookRestClientException $e)\n {\n $friends_uids = array();\n if (sfConfig::get('sf_logging_enabled'))\n {\n sfContext::getInstance()->getLogger()->info('{FacebookRestClientException} '.$e->getMessage());\n }\n }\n\n return $friends_uids;\n }\n\n /**\n *\n * @return sfFacebookGuardAdapter\n * @author fabriceb\n * @since Aug 10, 2009\n * @since 2009-10-08 Alban Creton : added configurability of the Guard Adapter.\n */\n public static function getGuardAdapter()\n {\n if (self::$guard_adapter === null)\n {\n if(sfConfig::get('app_facebook_guard_adapter') && 
class_exists(sfConfig::get('app_facebook_guard_adapter'), true))\n {\n $class = sfConfig::get('app_facebook_guard_adapter');\n }\n else if (class_exists('sfGuardUserPeer', true))\n {\n $class = 'sfFacebookPropelGuardAdapter';\n }\n else\n {\n $class = 'sfFacebookDoctrineGuardAdapter';\n }\n self::$guard_adapter = new $class();\n }\n if (!self::$guard_adapter)\n {\n error_log('Could not create guard adapter.');\n }\n\n return self::$guard_adapter;\n }\n\n\n /**\n *\n * @return boolean\n * @author fabriceb\n * @since Aug 27, 2009\n */\n public static function isJsLoaded()\n {\n\n return self::$is_js_loaded;\n }\n\n /**\n *\n * @return void\n * @author fabriceb\n * @since Aug 27, 2009\n */\n public static function setJsLoaded()\n {\n self::$is_js_loaded = true;\n }\n\n /**\n * Dirty way to convert fr into fr_FR\n * @param string $culture\n * @return string\n * @author fabriceb\n * @since Aug 28, 2009\n */\n public static function getLocale($culture = null)\n {\n if (is_null($culture))\n {\n $culture = sfContext::getInstance()->getUser()->getCulture();\n }\n\n $culture_to_locale = array(\n 'fr' => 'fr_FR',\n 'en' => 'en_US',\n 'de' => 'de_DE',\n 'it' => 'it_IT',\n );\n\n return array_key_exists($culture, $culture_to_locale) ? 
$culture_to_locale[$culture] : $culture;\n }\n\n /**\n * @return interger $facebook_uid\n * @author fabriceb\n * @since Oct 6, 2009\n */\n public static function getAnyFacebookUid()\n {\n\n $cookie = self::getFacebookCookie();\n $fb_uid = $cookie['uid'];\n sfContext::getInstance()->getLogger()->info('{sfFacebookConnect} Fb_uid from cookie : '.$fb_uid);\n\n return $fb_uid;\n }\n\n /**\n *\n * @param sfWebRequest $request\n * @return string\n * @author fabriceb\n * @since Oct 12, 2009\n */\n public static function getFacebookSigParameters(sfWebRequest $request)\n {\n $parameters = $request->getParameterHolder()->getAll();\n\n $parameter_string = '';\n foreach ($parameters as $key => $parameter)\n {\n if (substr($key,0,3)=='fb_')\n {\n $parameter_string .= '&'.$key.'='.$parameter;\n }\n }\n\n return $parameter_string;\n }\n\n}\n"}}
-{"repo": "MacSysadmin/pymacadmin", "pr_number": 13, "title": "Support for regexes, clearer logging, handle missing user_info", "state": "closed", "merged_at": "2016-01-16T03:38:04Z", "additions": 12, "deletions": 7, "files_changed": ["bin/crankd.py"], "files_before": {"bin/crankd.py": "#!/usr/bin/python2.7\n# encoding: utf-8\n\n\"\"\"\nUsage: %prog\n\nMonitor system event notifications\n\nConfiguration:\n\nThe configuration file is divided into sections for each class of\nevents. Each section is a dictionary using the event condition as the\nkey (\"NSWorkspaceDidWakeNotification\", \"State:/Network/Global/IPv4\",\netc). Each event must have one of the following properties:\n\ncommand: a shell command\nfunction: the name of a python function\nclass: the name of a python class which will be instantiated once\n and have methods called as events occur.\nmethod: (class, method) tuple\n\"\"\"\n\nfrom Cocoa import \\\n CFAbsoluteTimeGetCurrent, \\\n CFRunLoopAddSource, \\\n CFRunLoopAddTimer, \\\n CFRunLoopTimerCreate, \\\n NSObject, \\\n NSRunLoop, \\\n NSWorkspace, \\\n kCFRunLoopCommonModes\n\nfrom SystemConfiguration import \\\n SCDynamicStoreCopyKeyList, \\\n SCDynamicStoreCreate, \\\n SCDynamicStoreCreateRunLoopSource, \\\n SCDynamicStoreSetNotificationKeys\n\nfrom FSEvents import \\\n FSEventStreamCreate, \\\n FSEventStreamStart, \\\n FSEventStreamScheduleWithRunLoop, \\\n kFSEventStreamEventIdSinceNow, \\\n kCFRunLoopDefaultMode, \\\n kFSEventStreamEventFlagMustScanSubDirs, \\\n kFSEventStreamEventFlagUserDropped, \\\n kFSEventStreamEventFlagKernelDropped\n\nimport os\nimport os.path\nimport logging\nimport logging.handlers\nimport sys\nimport re\nfrom subprocess import call\nfrom optparse import OptionParser\nfrom plistlib import readPlist, writePlist\nfrom PyObjCTools import AppHelper\nfrom functools import partial\nimport signal\nfrom datetime import datetime\nfrom objc import super\n\n\nVERSION = '$Revision: #4 $'\n\nHANDLER_OBJECTS = dict() # Events which 
have a \"class\" handler use an instantiated object; we want to load only one copy\nSC_HANDLERS = dict() # Callbacks indexed by SystemConfiguration keys\nFS_WATCHED_FILES = dict() # Callbacks indexed by filesystem path\nWORKSPACE_HANDLERS = dict() # handlers for workspace events\n\n\nclass BaseHandler(object):\n # pylint: disable-msg=C0111,R0903\n pass\n\nclass NotificationHandler(NSObject):\n \"\"\"Simple base class for handling NSNotification events\"\"\"\n # Method names and class structure are dictated by Cocoa & PyObjC, which\n # is substantially different from PEP-8:\n # pylint: disable-msg=C0103,W0232,R0903\n\n def init(self):\n \"\"\"NSObject-compatible initializer\"\"\"\n self = super(NotificationHandler, self).init()\n if self is None: return None\n self.callable = self.not_implemented\n return self # NOTE: Unlike Python, NSObject's init() must return self!\n\n def not_implemented(self, *args, **kwargs):\n \"\"\"A dummy function which exists only to catch configuration errors\"\"\"\n # TODO: Is there a better way to report the caller's location?\n import inspect\n stack = inspect.stack()\n my_name = stack[0][3]\n caller = stack[1][3]\n raise NotImplementedError(\n \"%s should have been overridden. 
Called by %s as: %s(%s)\" % (\n my_name,\n caller,\n my_name,\n \", \".join(map(repr, args) + [ \"%s=%s\" % (k, repr(v)) for k,v in kwargs.items() ])\n )\n )\n\n def onNotification_(self, the_notification):\n \"\"\"Pass an NSNotifications to our handler\"\"\"\n if the_notification.userInfo:\n user_info = the_notification.userInfo()\n else:\n user_info = None\n self.callable(user_info=user_info) # pylint: disable-msg=E1101\n\n\ndef log_list(msg, items, level=logging.INFO):\n \"\"\"\n Record a a list of values with a message\n\n This would ordinarily be a simple logging call but we want to keep the\n length below the 1024-byte syslog() limitation and we'll format things\n nicely by repeating our message with as many of the values as will fit.\n\n Individual items longer than the maximum length will be truncated.\n \"\"\"\n\n max_len = 1024 - len(msg % \"\")\n cur_len = 0\n cur_items = list()\n\n while [ i[:max_len] for i in items]:\n i = items.pop()\n if cur_len + len(i) + 2 > max_len:\n logging.info(msg % \", \".join(cur_items))\n cur_len = 0\n cur_items = list()\n\n cur_items.append(i)\n cur_len += len(i) + 2\n\n logging.log(level, msg % \", \".join(cur_items))\n\ndef get_callable_for_event(name, event_config, context=None):\n \"\"\"\n Returns a callable object which can be used as a callback for any\n event. 
The returned function has context information, logging, etc.\n included so they do not need to be passed when the actual event\n occurs.\n\n NOTE: This function does not process \"class\" handlers - by design they\n are passed to the system libraries which expect a delegate object with\n various event handling methods\n \"\"\"\n\n kwargs = {\n 'context': context,\n 'key': name,\n 'config': event_config,\n }\n\n if \"command\" in event_config:\n f = partial(do_shell, event_config[\"command\"], **kwargs)\n elif \"function\" in event_config:\n f = partial(get_callable_from_string(event_config[\"function\"]), **kwargs)\n elif \"method\" in event_config:\n f = partial(getattr(get_handler_object(event_config['method'][0]), event_config['method'][1]), **kwargs)\n else:\n raise AttributeError(\"%s have a class, method, function or command\" % name)\n\n return f\n\n\ndef get_mod_func(callback):\n \"\"\"Convert a fully-qualified module.function name to (module, function) - stolen from Django\"\"\"\n try:\n dot = callback.rindex('.')\n except ValueError:\n return (callback, '')\n return (callback[:dot], callback[dot+1:])\n\n\ndef get_callable_from_string(f_name):\n \"\"\"Takes a string containing a function name (optionally module qualified) and returns a callable object\"\"\"\n try:\n mod_name, func_name = get_mod_func(f_name)\n if mod_name == \"\" and func_name == \"\":\n raise AttributeError(\"%s couldn't be converted to a module or function name\" % f_name)\n\n module = __import__(mod_name)\n\n if func_name == \"\":\n func_name = mod_name # The common case is an eponymous class\n\n return getattr(module, func_name)\n\n except (ImportError, AttributeError), exc:\n raise RuntimeError(\"Unable to create a callable object for '%s': %s\" % (f_name, exc))\n\n\ndef get_handler_object(class_name):\n \"\"\"Return a single instance of the given class name, instantiating it if necessary\"\"\"\n\n if class_name not in HANDLER_OBJECTS:\n h_obj = 
get_callable_from_string(class_name)()\n if isinstance(h_obj, BaseHandler):\n pass # TODO: Do we even need BaseHandler any more?\n HANDLER_OBJECTS[class_name] = h_obj\n\n return HANDLER_OBJECTS[class_name]\n\n\ndef handle_sc_event(store, changed_keys, info):\n \"\"\"Fire every event handler for one or more events\"\"\"\n\n for key in changed_keys:\n SC_HANDLERS[key](key=key, info=info)\n\n\ndef list_events(option, opt_str, value, parser):\n \"\"\"Displays the list of events which can be monitored on the current system\"\"\"\n\n print 'On this system SystemConfiguration supports these events:'\n for event in sorted(SCDynamicStoreCopyKeyList(get_sc_store(), '.*')):\n print \"\\t\", event\n\n print\n print \"Standard NSWorkspace Notification messages:\\n\\t\",\n print \"\\n\\t\".join('''\n NSWorkspaceDidLaunchApplicationNotification\n NSWorkspaceDidMountNotification\n NSWorkspaceDidPerformFileOperationNotification\n NSWorkspaceDidTerminateApplicationNotification\n NSWorkspaceDidUnmountNotification\n NSWorkspaceDidWakeNotification\n NSWorkspaceSessionDidBecomeActiveNotification\n NSWorkspaceSessionDidResignActiveNotification\n NSWorkspaceWillLaunchApplicationNotification\n NSWorkspaceWillPowerOffNotification\n NSWorkspaceWillSleepNotification\n NSWorkspaceWillUnmountNotification\n '''.split())\n\n sys.exit(0)\n\n\ndef process_commandline():\n \"\"\"\n Process command-line options\n Load our preference file\n Configure the module path to add Application Support directories\n \"\"\"\n parser = OptionParser(__doc__.strip())\n support_path = '/Library/' if os.getuid() == 0 else os.path.expanduser('~/Library/')\n preference_file = os.path.join(support_path, 'Preferences', 'com.googlecode.pymacadmin.crankd.plist')\n module_path = os.path.join(support_path, 'Application Support/crankd')\n\n if os.path.exists(module_path):\n sys.path.append(module_path)\n else:\n print >> sys.stderr, \"Module directory %s does not exist: Python handlers will need to use absolute pathnames\" % 
module_path\n\n parser.add_option(\"-f\", \"--config\", dest=\"config_file\", help='Use an alternate config file instead of %default', default=preference_file)\n parser.add_option(\"-l\", \"--list-events\", action=\"callback\", callback=list_events, help=\"List the events which can be monitored\")\n parser.add_option(\"-d\", \"--debug\", action=\"count\", default=False, help=\"Log detailed progress information\")\n (options, args) = parser.parse_args()\n\n if len(args):\n parser.error(\"Unknown command-line arguments: %s\" % args)\n\n options.support_path = support_path\n options.config_file = os.path.realpath(options.config_file)\n\n # This is somewhat messy but we want to alter the command-line to use full\n # file paths in case someone's code changes the current directory or the\n sys.argv = [ os.path.realpath(sys.argv[0]), ]\n\n if options.debug:\n logging.getLogger().setLevel(logging.DEBUG)\n sys.argv.append(\"--debug\")\n\n if options.config_file:\n sys.argv.append(\"--config\")\n sys.argv.append(options.config_file)\n\n return options\n\n\ndef load_config(options):\n \"\"\"Load our configuration from plist or create a default file if none exists\"\"\"\n if not os.path.exists(options.config_file):\n logging.info(\"%s does not exist - initializing with an example configuration\" % CRANKD_OPTIONS.config_file)\n print >>sys.stderr, 'Creating %s with default options for you to customize' % options.config_file\n print >>sys.stderr, '%s --list-events will list the events you can monitor on this system' % sys.argv[0]\n example_config = {\n 'SystemConfiguration': {\n 'State:/Network/Global/IPv4': {\n 'command': '/bin/echo \"Global IPv4 config changed\"'\n }\n },\n 'NSWorkspace': {\n 'NSWorkspaceDidMountNotification': {\n 'command': '/bin/echo \"A new volume was mounted!\"'\n },\n 'NSWorkspaceDidWakeNotification': {\n 'command': '/bin/echo \"The system woke from sleep!\"'\n },\n 'NSWorkspaceWillSleepNotification': {\n 'command': '/bin/echo \"The system is about to go 
to sleep!\"'\n }\n }\n }\n writePlist(example_config, options.config_file)\n sys.exit(1)\n\n logging.info(\"Loading configuration from %s\" % CRANKD_OPTIONS.config_file)\n\n plist = readPlist(options.config_file)\n\n if \"imports\" in plist:\n for module in plist['imports']:\n try:\n __import__(module)\n except ImportError, exc:\n print >> sys.stderr, \"Unable to import %s: %s\" % (module, exc)\n sys.exit(1)\n return plist\n\n\ndef configure_logging():\n \"\"\"Configures the logging module\"\"\"\n logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')\n\n # Enable logging to syslog as well:\n # Normally this would not be necessary but logging assumes syslog listens on\n # localhost syslog/udp, which is disabled on 10.5 (rdar://5871746)\n syslog = logging.handlers.SysLogHandler('/var/run/syslog')\n syslog.setFormatter(logging.Formatter('%(name)s: %(message)s'))\n syslog.setLevel(logging.INFO)\n logging.getLogger().addHandler(syslog)\n\n\ndef get_sc_store():\n \"\"\"Returns an SCDynamicStore instance\"\"\"\n return SCDynamicStoreCreate(None, \"crankd\", handle_sc_event, None)\n\n\ndef add_workspace_notifications(nsw_config):\n # See http://developer.apple.com/documentation/Cocoa/Conceptual/Workspace/Workspace.html\n notification_center = NSWorkspace.sharedWorkspace().notificationCenter()\n\n for event in nsw_config:\n event_config = nsw_config[event]\n\n if \"class\" in event_config:\n obj = get_handler_object(event_config['class'])\n objc_method = \"on%s:\" % event\n py_method = objc_method.replace(\":\", \"_\")\n if not hasattr(obj, py_method) or not callable(getattr(obj, py_method)):\n print >> sys.stderr, \\\n \"NSWorkspace Notification %s: handler class %s must define a %s method\" % (event, event_config['class'], py_method)\n sys.exit(1)\n\n notification_center.addObserver_selector_name_object_(obj, objc_method, event, None)\n else:\n handler = NotificationHandler.new()\n handler.name = \"NSWorkspace Notification %s\" % event\n 
handler.callable = get_callable_for_event(event, event_config, context=handler.name)\n\n assert(callable(handler.onNotification_))\n\n notification_center.addObserver_selector_name_object_(handler, \"onNotification:\", event, None)\n WORKSPACE_HANDLERS[event] = handler\n\n log_list(\"Listening for these NSWorkspace notifications: %s\", nsw_config.keys())\n\n\ndef add_sc_notifications(sc_config):\n \"\"\"\n This uses the SystemConfiguration framework to get a SCDynamicStore session\n and register for certain events. See the Apple SystemConfiguration\n documentation for details:\n\n \n\n TN1145 may also be of interest:\n \n\n Inspired by the PyObjC SystemConfiguration callback demos:\n \n \"\"\"\n\n keys = sc_config.keys()\n\n try:\n for key in keys:\n SC_HANDLERS[key] = get_callable_for_event(key, sc_config[key], context=\"SystemConfiguration: %s\" % key)\n except AttributeError, exc:\n print >> sys.stderr, \"Error configuring SystemConfiguration events: %s\" % exc\n sys.exit(1)\n\n store = get_sc_store()\n\n SCDynamicStoreSetNotificationKeys(store, None, keys)\n\n # Get a CFRunLoopSource for our store session and add it to the application's runloop:\n CFRunLoopAddSource(\n NSRunLoop.currentRunLoop().getCFRunLoop(),\n SCDynamicStoreCreateRunLoopSource(None, store, 0),\n kCFRunLoopCommonModes\n )\n\n log_list(\"Listening for these SystemConfiguration events: %s\", keys)\n\n\ndef add_fs_notifications(fs_config):\n for path in fs_config:\n add_fs_notification(path, get_callable_for_event(path, fs_config[path], context=\"FSEvent: %s\" % path))\n\n\ndef add_fs_notification(f_path, callback):\n \"\"\"Adds an FSEvent notification for the specified path\"\"\"\n path = os.path.realpath(os.path.expanduser(f_path))\n if not os.path.exists(path):\n raise AttributeError(\"Cannot add an FSEvent notification: %s does not exist!\" % path)\n\n if not os.path.isdir(path):\n path = os.path.dirname(path)\n\n try:\n FS_WATCHED_FILES[path].append(callback)\n except KeyError:\n 
FS_WATCHED_FILES[path] = [callback]\n\n\ndef start_fs_events():\n stream_ref = FSEventStreamCreate(\n None, # Use the default CFAllocator\n fsevent_callback,\n None, # We don't need a FSEventStreamContext\n FS_WATCHED_FILES.keys(),\n kFSEventStreamEventIdSinceNow, # We only want events which happen in the future\n 1.0, # Process events within 1 second\n 0 # We don't need any special flags for our stream\n )\n\n if not stream_ref:\n raise RuntimeError(\"FSEventStreamCreate() failed!\")\n\n FSEventStreamScheduleWithRunLoop(stream_ref, NSRunLoop.currentRunLoop().getCFRunLoop(), kCFRunLoopDefaultMode)\n\n if not FSEventStreamStart(stream_ref):\n raise RuntimeError(\"Unable to start FSEvent stream!\")\n\n logging.debug(\"FSEventStream started for %d paths: %s\" % (len(FS_WATCHED_FILES), \", \".join(FS_WATCHED_FILES)))\n\n\ndef fsevent_callback(stream_ref, full_path, event_count, paths, masks, ids):\n \"\"\"Process an FSEvent (consult the Cocoa docs) and call each of our handlers which monitors that path or a parent\"\"\"\n for i in range(event_count):\n path = os.path.dirname(paths[i])\n\n if masks[i] & kFSEventStreamEventFlagMustScanSubDirs:\n recursive = True\n\n if masks[i] & kFSEventStreamEventFlagUserDropped:\n logging.error(\"We were too slow processing FSEvents and some events were dropped\")\n recursive = True\n\n if masks[i] & kFSEventStreamEventFlagKernelDropped:\n logging.error(\"The kernel was too slow processing FSEvents and some events were dropped!\")\n recursive = True\n else:\n recursive = False\n\n for i in [k for k in FS_WATCHED_FILES if path.startswith(k)]:\n logging.debug(\"FSEvent: %s: processing %d callback(s) for path %s\" % (i, len(FS_WATCHED_FILES[i]), path))\n for j in FS_WATCHED_FILES[i]:\n j(i, path=path, recursive=recursive)\n\n\ndef timer_callback(*args):\n \"\"\"Handles the timer events which we use simply to have the runloop run regularly. 
Currently this logs a timestamp for debugging purposes\"\"\"\n logging.debug(\"timer callback at %s\" % datetime.now())\n\n\ndef main():\n configure_logging()\n\n global CRANKD_OPTIONS, CRANKD_CONFIG\n CRANKD_OPTIONS = process_commandline()\n CRANKD_CONFIG = load_config(CRANKD_OPTIONS)\n\n if \"NSWorkspace\" in CRANKD_CONFIG:\n add_workspace_notifications(CRANKD_CONFIG['NSWorkspace'])\n\n if \"SystemConfiguration\" in CRANKD_CONFIG:\n add_sc_notifications(CRANKD_CONFIG['SystemConfiguration'])\n\n if \"FSEvents\" in CRANKD_CONFIG:\n add_fs_notifications(CRANKD_CONFIG['FSEvents'])\n\n # We reuse our FSEvents code to watch for changes to our files and\n # restart if any of our libraries have been updated:\n add_conditional_restart(CRANKD_OPTIONS.config_file, \"Configuration file %s changed\" % CRANKD_OPTIONS.config_file)\n for m in filter(lambda i: i and hasattr(i, '__file__'), sys.modules.values()):\n if m.__name__ == \"__main__\":\n msg = \"%s was updated\" % m.__file__\n else:\n msg = \"Module %s was updated\" % m.__name__\n\n add_conditional_restart(m.__file__, msg)\n\n signal.signal(signal.SIGHUP, partial(restart, \"SIGHUP received\"))\n\n start_fs_events()\n\n # NOTE: This timer is basically a kludge around the fact that we can't reliably get\n # signals or Control-C inside a runloop. 
This wakes us up often enough to\n # appear tolerably responsive:\n CFRunLoopAddTimer(\n NSRunLoop.currentRunLoop().getCFRunLoop(),\n CFRunLoopTimerCreate(None, CFAbsoluteTimeGetCurrent(), 2.0, 0, 0, timer_callback, None),\n kCFRunLoopCommonModes\n )\n\n try:\n AppHelper.runConsoleEventLoop(installInterrupt=True)\n except KeyboardInterrupt:\n logging.info(\"KeyboardInterrupt received, exiting\")\n\n sys.exit(0)\n\ndef create_env_name(name):\n \"\"\"\n Converts input names into more traditional shell environment name style\n\n >>> create_env_name(\"NSApplicationBundleIdentifier\")\n 'NSAPPLICATION_BUNDLE_IDENTIFIER'\n >>> create_env_name(\"NSApplicationBundleIdentifier-1234$foobar!\")\n 'NSAPPLICATION_BUNDLE_IDENTIFIER_1234_FOOBAR'\n \"\"\"\n new_name = re.sub(r'''(?<=[a-z])([A-Z])''', '_\\\\1', name)\n new_name = re.sub(r'\\W+', '_', new_name)\n new_name = re.sub(r'_{2,}', '_', new_name)\n return new_name.upper().strip(\"_\")\n\ndef do_shell(command, context=None, **kwargs):\n \"\"\"Executes a shell command with logging\"\"\"\n logging.info(\"%s: executing %s\" % (context, command))\n\n child_env = {'CRANKD_CONTEXT': context}\n\n # We'll pull a subset of the available information in for shell scripts.\n # Anyone who needs more will probably want to write a Python handler\n # instead so they can reuse things like our logger & config info and avoid\n # ordeals like associative arrays in Bash\n for k in [ 'info', 'key' ]:\n if k in kwargs and kwargs[k]:\n child_env['CRANKD_%s' % k.upper()] = str(kwargs[k])\n\n user_info = kwargs.get(\"user_info\")\n if user_info:\n for k, v in user_info.items():\n child_env[create_env_name(k)] = str(v)\n\n try:\n rc = call(command, shell=True, env=child_env)\n if rc == 0:\n logging.debug(\"`%s` returned %d\" % (command, rc))\n elif rc < 0:\n logging.error(\"`%s` was terminated by signal %d\" % (command, -rc))\n else:\n logging.error(\"`%s` returned %d\" % (command, rc))\n except OSError, exc:\n logging.error(\"Got an exception when 
executing %s:\" % (command, exc))\n\n\ndef add_conditional_restart(file_name, reason):\n \"\"\"FSEvents monitors directories, not files. This function uses stat to\n restart only if the file's mtime has changed\"\"\"\n file_name = os.path.realpath(file_name)\n while not os.path.exists(file_name):\n file_name = os.path.dirname(file_name)\n orig_stat = os.stat(file_name).st_mtime\n\n def cond_restart(*args, **kwargs):\n try:\n if os.stat(file_name).st_mtime != orig_stat:\n restart(reason)\n except (OSError, IOError, RuntimeError), exc:\n restart(\"Exception while checking %s: %s\" % (file_name, exc))\n\n add_fs_notification(file_name, cond_restart)\n\n\ndef restart(reason, *args, **kwargs):\n \"\"\"Perform a complete restart of the current process using exec()\"\"\"\n logging.info(\"Restarting: %s\" % reason)\n os.execv(sys.argv[0], sys.argv)\n\nif __name__ == '__main__':\n main()\n"}, "files_after": {"bin/crankd.py": "#!/usr/bin/python2.7\n# encoding: utf-8\n\n\"\"\"\nUsage: %prog\n\nMonitor system event notifications\n\nConfiguration:\n\nThe configuration file is divided into sections for each class of\nevents. Each section is a dictionary using the event condition as the\nkey (\"NSWorkspaceDidWakeNotification\", \"State:/Network/Global/IPv4\",\netc). 
Each event must have one of the following properties:\n\ncommand: a shell command\nfunction: the name of a python function\nclass: the name of a python class which will be instantiated once\n and have methods called as events occur.\nmethod: (class, method) tuple\n\"\"\"\n\nfrom Cocoa import \\\n CFAbsoluteTimeGetCurrent, \\\n CFRunLoopAddSource, \\\n CFRunLoopAddTimer, \\\n CFRunLoopTimerCreate, \\\n NSObject, \\\n NSRunLoop, \\\n NSWorkspace, \\\n kCFRunLoopCommonModes\n\nfrom SystemConfiguration import \\\n SCDynamicStoreCopyKeyList, \\\n SCDynamicStoreCreate, \\\n SCDynamicStoreCreateRunLoopSource, \\\n SCDynamicStoreSetNotificationKeys\n\nfrom FSEvents import \\\n FSEventStreamCreate, \\\n FSEventStreamStart, \\\n FSEventStreamScheduleWithRunLoop, \\\n kFSEventStreamEventIdSinceNow, \\\n kCFRunLoopDefaultMode, \\\n kFSEventStreamEventFlagMustScanSubDirs, \\\n kFSEventStreamEventFlagUserDropped, \\\n kFSEventStreamEventFlagKernelDropped\n\nimport os\nimport os.path\nimport logging\nimport logging.handlers\nimport sys\nimport re\nfrom subprocess import call\nfrom optparse import OptionParser\nfrom plistlib import readPlist, writePlist\nfrom PyObjCTools import AppHelper\nfrom functools import partial\nimport signal\nfrom datetime import datetime\nfrom objc import super\n\n\nVERSION = '$Revision: #4 $'\n\nHANDLER_OBJECTS = dict() # Events which have a \"class\" handler use an instantiated object; we want to load only one copy\nSC_HANDLERS = dict() # Callbacks indexed by SystemConfiguration keys\nFS_WATCHED_FILES = dict() # Callbacks indexed by filesystem path\nWORKSPACE_HANDLERS = dict() # handlers for workspace events\n\n\nclass BaseHandler(object):\n # pylint: disable-msg=C0111,R0903\n pass\n\nclass NotificationHandler(NSObject):\n \"\"\"Simple base class for handling NSNotification events\"\"\"\n # Method names and class structure are dictated by Cocoa & PyObjC, which\n # is substantially different from PEP-8:\n # pylint: disable-msg=C0103,W0232,R0903\n\n def 
init(self):\n \"\"\"NSObject-compatible initializer\"\"\"\n self = super(NotificationHandler, self).init()\n if self is None: return None\n self.callable = self.not_implemented\n return self # NOTE: Unlike Python, NSObject's init() must return self!\n\n def not_implemented(self, *args, **kwargs):\n \"\"\"A dummy function which exists only to catch configuration errors\"\"\"\n # TODO: Is there a better way to report the caller's location?\n import inspect\n stack = inspect.stack()\n my_name = stack[0][3]\n caller = stack[1][3]\n raise NotImplementedError(\n \"%s should have been overridden. Called by %s as: %s(%s)\" % (\n my_name,\n caller,\n my_name,\n \", \".join(map(repr, args) + [ \"%s=%s\" % (k, repr(v)) for k,v in kwargs.items() ])\n )\n )\n\n def onNotification_(self, the_notification):\n \"\"\"Pass an NSNotifications to our handler\"\"\"\n if the_notification.userInfo:\n user_info = the_notification.userInfo()\n else:\n user_info = None\n self.callable(user_info=user_info) # pylint: disable-msg=E1101\n\n\ndef log_list(msg, items, level=logging.INFO):\n \"\"\"\n Record a a list of values with a message\n\n This would ordinarily be a simple logging call but we want to keep the\n length below the 1024-byte syslog() limitation and we'll format things\n nicely by repeating our message with as many of the values as will fit.\n\n Individual items longer than the maximum length will be truncated.\n \"\"\"\n\n max_len = 1024 - len(msg % \"\")\n cur_len = 0\n cur_items = list()\n\n while [ i[:max_len] for i in items]:\n i = items.pop()\n if cur_len + len(i) + 2 > max_len:\n logging.info(msg % \", \".join(cur_items))\n cur_len = 0\n cur_items = list()\n\n cur_items.append(i)\n cur_len += len(i) + 2\n\n logging.log(level, msg % \", \".join(cur_items))\n\ndef get_callable_for_event(name, event_config, context=None):\n \"\"\"\n Returns a callable object which can be used as a callback for any\n event. 
The returned function has context information, logging, etc.\n included so they do not need to be passed when the actual event\n occurs.\n\n NOTE: This function does not process \"class\" handlers - by design they\n are passed to the system libraries which expect a delegate object with\n various event handling methods\n \"\"\"\n\n kwargs = {\n 'context': context,\n 'key': name,\n 'config': event_config,\n }\n\n if \"command\" in event_config:\n f = partial(do_shell, event_config[\"command\"], **kwargs)\n elif \"function\" in event_config:\n f = partial(get_callable_from_string(event_config[\"function\"]), **kwargs)\n elif \"method\" in event_config:\n f = partial(getattr(get_handler_object(event_config['method'][0]), event_config['method'][1]), **kwargs)\n else:\n raise AttributeError(\"%s have a class, method, function or command\" % name)\n\n return f\n\n\ndef get_mod_func(callback):\n \"\"\"Convert a fully-qualified module.function name to (module, function) - stolen from Django\"\"\"\n try:\n dot = callback.rindex('.')\n except ValueError:\n return (callback, '')\n return (callback[:dot], callback[dot+1:])\n\n\ndef get_callable_from_string(f_name):\n \"\"\"Takes a string containing a function name (optionally module qualified) and returns a callable object\"\"\"\n try:\n mod_name, func_name = get_mod_func(f_name)\n if mod_name == \"\" and func_name == \"\":\n raise AttributeError(\"%s couldn't be converted to a module or function name\" % f_name)\n\n module = __import__(mod_name)\n\n if func_name == \"\":\n func_name = mod_name # The common case is an eponymous class\n\n return getattr(module, func_name)\n\n except (ImportError, AttributeError), exc:\n raise RuntimeError(\"Unable to create a callable object for '%s': %s\" % (f_name, exc))\n\n\ndef get_handler_object(class_name):\n \"\"\"Return a single instance of the given class name, instantiating it if necessary\"\"\"\n\n if class_name not in HANDLER_OBJECTS:\n h_obj = 
get_callable_from_string(class_name)()\n if isinstance(h_obj, BaseHandler):\n pass # TODO: Do we even need BaseHandler any more?\n HANDLER_OBJECTS[class_name] = h_obj\n\n return HANDLER_OBJECTS[class_name]\n\n\ndef handle_sc_event(store, changed_keys, info):\n \"\"\"Fire every event handler for one or more events\"\"\"\n try:\n for key in changed_keys:\n SC_HANDLERS[key](key=key, info=info)\n except KeyError:\n # If there's no exact match, go through the list again assuming regex\n for key in changed_keys:\n for handler in SC_HANDLERS:\n if re.match(handler, key):\n SC_HANDLERS[handler](key=key, info=info)\n\n\n\ndef list_events(option, opt_str, value, parser):\n \"\"\"Displays the list of events which can be monitored on the current system\"\"\"\n\n print 'On this system SystemConfiguration supports these events:'\n for event in sorted(SCDynamicStoreCopyKeyList(get_sc_store(), '.*')):\n print \"\\t\", event\n\n print\n print \"Standard NSWorkspace Notification messages:\\n\\t\",\n print \"\\n\\t\".join('''\n NSWorkspaceDidLaunchApplicationNotification\n NSWorkspaceDidMountNotification\n NSWorkspaceDidPerformFileOperationNotification\n NSWorkspaceDidTerminateApplicationNotification\n NSWorkspaceDidUnmountNotification\n NSWorkspaceDidWakeNotification\n NSWorkspaceSessionDidBecomeActiveNotification\n NSWorkspaceSessionDidResignActiveNotification\n NSWorkspaceWillLaunchApplicationNotification\n NSWorkspaceWillPowerOffNotification\n NSWorkspaceWillSleepNotification\n NSWorkspaceWillUnmountNotification\n '''.split())\n\n sys.exit(0)\n\n\ndef process_commandline():\n \"\"\"\n Process command-line options\n Load our preference file\n Configure the module path to add Application Support directories\n \"\"\"\n parser = OptionParser(__doc__.strip())\n support_path = '/Library/' if os.getuid() == 0 else os.path.expanduser('~/Library/')\n preference_file = os.path.join(support_path, 'Preferences', 'com.googlecode.pymacadmin.crankd.plist')\n module_path = 
os.path.join(support_path, 'Application Support/crankd')\n\n if os.path.exists(module_path):\n sys.path.append(module_path)\n else:\n print >> sys.stderr, \"Module directory %s does not exist: Python handlers will need to use absolute pathnames\" % module_path\n\n parser.add_option(\"-f\", \"--config\", dest=\"config_file\", help='Use an alternate config file instead of %default', default=preference_file)\n parser.add_option(\"-l\", \"--list-events\", action=\"callback\", callback=list_events, help=\"List the events which can be monitored\")\n parser.add_option(\"-d\", \"--debug\", action=\"count\", default=False, help=\"Log detailed progress information\")\n (options, args) = parser.parse_args()\n\n if len(args):\n parser.error(\"Unknown command-line arguments: %s\" % args)\n\n options.support_path = support_path\n options.config_file = os.path.realpath(options.config_file)\n\n # This is somewhat messy but we want to alter the command-line to use full\n # file paths in case someone's code changes the current directory or the\n sys.argv = [ os.path.realpath(sys.argv[0]), ]\n\n if options.debug:\n logging.getLogger().setLevel(logging.DEBUG)\n sys.argv.append(\"--debug\")\n\n if options.config_file:\n sys.argv.append(\"--config\")\n sys.argv.append(options.config_file)\n\n return options\n\n\ndef load_config(options):\n \"\"\"Load our configuration from plist or create a default file if none exists\"\"\"\n if not os.path.exists(options.config_file):\n logging.info(\"%s does not exist - initializing with an example configuration\" % CRANKD_OPTIONS.config_file)\n print >>sys.stderr, 'Creating %s with default options for you to customize' % options.config_file\n print >>sys.stderr, '%s --list-events will list the events you can monitor on this system' % sys.argv[0]\n example_config = {\n 'SystemConfiguration': {\n 'State:/Network/Global/IPv4': {\n 'command': '/bin/echo \"Global IPv4 config changed\"'\n }\n },\n 'NSWorkspace': {\n 'NSWorkspaceDidMountNotification': {\n 
'command': '/bin/echo \"A new volume was mounted!\"'\n },\n 'NSWorkspaceDidWakeNotification': {\n 'command': '/bin/echo \"The system woke from sleep!\"'\n },\n 'NSWorkspaceWillSleepNotification': {\n 'command': '/bin/echo \"The system is about to go to sleep!\"'\n }\n }\n }\n writePlist(example_config, options.config_file)\n sys.exit(1)\n\n logging.info(\"Loading configuration from %s\" % CRANKD_OPTIONS.config_file)\n\n plist = readPlist(options.config_file)\n\n if \"imports\" in plist:\n for module in plist['imports']:\n try:\n __import__(module)\n except ImportError, exc:\n print >> sys.stderr, \"Unable to import %s: %s\" % (module, exc)\n sys.exit(1)\n return plist\n\n\ndef configure_logging():\n \"\"\"Configures the logging module\"\"\"\n logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')\n\n # Enable logging to syslog as well:\n # Normally this would not be necessary but logging assumes syslog listens on\n # localhost syslog/udp, which is disabled on 10.5 (rdar://5871746)\n syslog = logging.handlers.SysLogHandler('/var/run/syslog')\n syslog.setFormatter(logging.Formatter('%(pathname)s[%(process)d]:%(message)s'))\n syslog.setLevel(logging.INFO)\n logging.getLogger().addHandler(syslog)\n\n\ndef get_sc_store():\n \"\"\"Returns an SCDynamicStore instance\"\"\"\n return SCDynamicStoreCreate(None, \"crankd\", handle_sc_event, None)\n\n\ndef add_workspace_notifications(nsw_config):\n # See http://developer.apple.com/documentation/Cocoa/Conceptual/Workspace/Workspace.html\n notification_center = NSWorkspace.sharedWorkspace().notificationCenter()\n\n for event in nsw_config:\n event_config = nsw_config[event]\n\n if \"class\" in event_config:\n obj = get_handler_object(event_config['class'])\n objc_method = \"on%s:\" % event\n py_method = objc_method.replace(\":\", \"_\")\n if not hasattr(obj, py_method) or not callable(getattr(obj, py_method)):\n print >> sys.stderr, \\\n \"NSWorkspace Notification %s: handler class %s must define a %s 
method\" % (event, event_config['class'], py_method)\n sys.exit(1)\n\n notification_center.addObserver_selector_name_object_(obj, objc_method, event, None)\n else:\n handler = NotificationHandler.new()\n handler.name = \"NSWorkspace Notification %s\" % event\n handler.callable = get_callable_for_event(event, event_config, context=handler.name)\n\n assert(callable(handler.onNotification_))\n\n notification_center.addObserver_selector_name_object_(handler, \"onNotification:\", event, None)\n WORKSPACE_HANDLERS[event] = handler\n log_list(\"Listening for these NSWorkspace notifications: %s\", nsw_config.keys())\n\n\ndef add_sc_notifications(sc_config):\n \"\"\"\n This uses the SystemConfiguration framework to get a SCDynamicStore session\n and register for certain events. See the Apple SystemConfiguration\n documentation for details:\n\n \n\n TN1145 may also be of interest:\n \n\n Inspired by the PyObjC SystemConfiguration callback demos:\n \n \"\"\"\n\n keys = sc_config.keys()\n\n try:\n for key in keys:\n SC_HANDLERS[key] = get_callable_for_event(key, sc_config[key], context=\"SystemConfiguration: %s\" % key)\n except AttributeError, exc:\n print >> sys.stderr, \"Error configuring SystemConfiguration events: %s\" % exc\n sys.exit(1)\n\n store = get_sc_store()\n\n SCDynamicStoreSetNotificationKeys(store, None, keys)\n\n # Get a CFRunLoopSource for our store session and add it to the application's runloop:\n CFRunLoopAddSource(\n NSRunLoop.currentRunLoop().getCFRunLoop(),\n SCDynamicStoreCreateRunLoopSource(None, store, 0),\n kCFRunLoopCommonModes\n )\n\n log_list(\"Listening for these SystemConfiguration events: %s\", keys)\n\n\ndef add_fs_notifications(fs_config):\n for path in fs_config:\n add_fs_notification(path, get_callable_for_event(path, fs_config[path], context=\"FSEvent: %s\" % path))\n\n\ndef add_fs_notification(f_path, callback):\n \"\"\"Adds an FSEvent notification for the specified path\"\"\"\n path = os.path.realpath(os.path.expanduser(f_path))\n if 
not os.path.exists(path):\n raise AttributeError(\"Cannot add an FSEvent notification: %s does not exist!\" % path)\n\n if not os.path.isdir(path):\n path = os.path.dirname(path)\n\n try:\n FS_WATCHED_FILES[path].append(callback)\n except KeyError:\n FS_WATCHED_FILES[path] = [callback]\n\n\ndef start_fs_events():\n stream_ref = FSEventStreamCreate(\n None, # Use the default CFAllocator\n fsevent_callback,\n None, # We don't need a FSEventStreamContext\n FS_WATCHED_FILES.keys(),\n kFSEventStreamEventIdSinceNow, # We only want events which happen in the future\n 1.0, # Process events within 1 second\n 0 # We don't need any special flags for our stream\n )\n\n if not stream_ref:\n raise RuntimeError(\"FSEventStreamCreate() failed!\")\n\n FSEventStreamScheduleWithRunLoop(stream_ref, NSRunLoop.currentRunLoop().getCFRunLoop(), kCFRunLoopDefaultMode)\n\n if not FSEventStreamStart(stream_ref):\n raise RuntimeError(\"Unable to start FSEvent stream!\")\n\n logging.debug(\"FSEventStream started for %d paths: %s\" % (len(FS_WATCHED_FILES), \", \".join(FS_WATCHED_FILES)))\n\n\ndef fsevent_callback(stream_ref, full_path, event_count, paths, masks, ids):\n \"\"\"Process an FSEvent (consult the Cocoa docs) and call each of our handlers which monitors that path or a parent\"\"\"\n for i in range(event_count):\n path = os.path.dirname(paths[i])\n\n if masks[i] & kFSEventStreamEventFlagMustScanSubDirs:\n recursive = True\n\n if masks[i] & kFSEventStreamEventFlagUserDropped:\n logging.error(\"We were too slow processing FSEvents and some events were dropped\")\n recursive = True\n\n if masks[i] & kFSEventStreamEventFlagKernelDropped:\n logging.error(\"The kernel was too slow processing FSEvents and some events were dropped!\")\n recursive = True\n else:\n recursive = False\n\n for i in [k for k in FS_WATCHED_FILES if path.startswith(k)]:\n logging.debug(\"FSEvent: %s: processing %d callback(s) for path %s\" % (i, len(FS_WATCHED_FILES[i]), path))\n for j in FS_WATCHED_FILES[i]:\n j(i, 
path=path, recursive=recursive)\n\n\ndef timer_callback(*args):\n \"\"\"Handles the timer events which we use simply to have the runloop run regularly. Currently this logs a timestamp for debugging purposes\"\"\"\n logging.debug(\"timer callback at %s\" % datetime.now())\n\n\ndef main():\n configure_logging()\n\n global CRANKD_OPTIONS, CRANKD_CONFIG\n CRANKD_OPTIONS = process_commandline()\n CRANKD_CONFIG = load_config(CRANKD_OPTIONS)\n\n if \"NSWorkspace\" in CRANKD_CONFIG:\n add_workspace_notifications(CRANKD_CONFIG['NSWorkspace'])\n\n if \"SystemConfiguration\" in CRANKD_CONFIG:\n add_sc_notifications(CRANKD_CONFIG['SystemConfiguration'])\n\n if \"FSEvents\" in CRANKD_CONFIG:\n add_fs_notifications(CRANKD_CONFIG['FSEvents'])\n\n # We reuse our FSEvents code to watch for changes to our files and\n # restart if any of our libraries have been updated:\n add_conditional_restart(CRANKD_OPTIONS.config_file, \"Configuration file %s changed\" % CRANKD_OPTIONS.config_file)\n for m in filter(lambda i: i and hasattr(i, '__file__'), sys.modules.values()):\n if m.__name__ == \"__main__\":\n msg = \"%s was updated\" % m.__file__\n else:\n msg = \"Module %s was updated\" % m.__name__\n\n add_conditional_restart(m.__file__, msg)\n\n signal.signal(signal.SIGHUP, partial(restart, \"SIGHUP received\"))\n\n start_fs_events()\n\n # NOTE: This timer is basically a kludge around the fact that we can't reliably get\n # signals or Control-C inside a runloop. 
This wakes us up often enough to\n # appear tolerably responsive:\n CFRunLoopAddTimer(\n NSRunLoop.currentRunLoop().getCFRunLoop(),\n CFRunLoopTimerCreate(None, CFAbsoluteTimeGetCurrent(), 2.0, 0, 0, timer_callback, None),\n kCFRunLoopCommonModes\n )\n\n try:\n AppHelper.runConsoleEventLoop(installInterrupt=True)\n except KeyboardInterrupt:\n logging.info(\"KeyboardInterrupt received, exiting\")\n\n sys.exit(0)\n\ndef create_env_name(name):\n \"\"\"\n Converts input names into more traditional shell environment name style\n\n >>> create_env_name(\"NSApplicationBundleIdentifier\")\n 'NSAPPLICATION_BUNDLE_IDENTIFIER'\n >>> create_env_name(\"NSApplicationBundleIdentifier-1234$foobar!\")\n 'NSAPPLICATION_BUNDLE_IDENTIFIER_1234_FOOBAR'\n \"\"\"\n new_name = re.sub(r'''(?<=[a-z])([A-Z])''', '_\\\\1', name)\n new_name = re.sub(r'\\W+', '_', new_name)\n new_name = re.sub(r'_{2,}', '_', new_name)\n return new_name.upper().strip(\"_\")\n\ndef do_shell(command, context=None, **kwargs):\n \"\"\"Executes a shell command with logging\"\"\"\n logging.info(\"%s: executing %s\" % (context, command))\n\n child_env = {'CRANKD_CONTEXT': context}\n\n # We'll pull a subset of the available information in for shell scripts.\n # Anyone who needs more will probably want to write a Python handler\n # instead so they can reuse things like our logger & config info and avoid\n # ordeals like associative arrays in Bash\n for k in [ 'info', 'key' ]:\n if k in kwargs and kwargs[k]:\n child_env['CRANKD_%s' % k.upper()] = str(kwargs[k])\n\n if 'user_info' in kwargs:\n for k, v in kwargs['user_info'].items():\n child_env[create_env_name(k)] = str(v)\n\n try:\n rc = call(command, shell=True, env=child_env)\n if rc == 0:\n logging.debug(\"`%s` returned %d\" % (command, rc))\n elif rc < 0:\n logging.error(\"`%s` was terminated by signal %d\" % (command, -rc))\n else:\n logging.error(\"`%s` returned %d\" % (command, rc))\n except OSError, exc:\n logging.error(\"Got an exception when executing %s:\" % 
(command, exc))\n\n\ndef add_conditional_restart(file_name, reason):\n \"\"\"FSEvents monitors directories, not files. This function uses stat to\n restart only if the file's mtime has changed\"\"\"\n file_name = os.path.realpath(file_name)\n while not os.path.exists(file_name):\n file_name = os.path.dirname(file_name)\n orig_stat = os.stat(file_name).st_mtime\n\n def cond_restart(*args, **kwargs):\n try:\n if os.stat(file_name).st_mtime != orig_stat:\n restart(reason)\n except (OSError, IOError, RuntimeError), exc:\n restart(\"Exception while checking %s: %s\" % (file_name, exc))\n\n add_fs_notification(file_name, cond_restart)\n\n\ndef restart(reason, *args, **kwargs):\n \"\"\"Perform a complete restart of the current process using exec()\"\"\"\n logging.info(\"Restarting: %s\" % reason)\n os.execv(sys.argv[0], sys.argv)\n\nif __name__ == '__main__':\n main()\n"}}
-{"repo": "creativedutchmen/Root-Page-Params", "pr_number": 1, "title": "lil spelling correction", "state": "closed", "merged_at": null, "additions": 1, "deletions": 1, "files_changed": ["extension.driver.php"], "files_before": {"extension.driver.php": " 'Root page params',\n\t\t\t\t\t\t 'version' => '1.2',\n\t\t\t\t\t\t 'release-date' => '2009-12-10',\n\t\t\t\t\t\t 'author' => array('name' => 'Huib Keemink',\n\t\t\t\t\t\t\t\t\t\t 'website' => 'http://www.creativedutchmen.com',\n\t\t\t\t\t\t\t\t\t\t 'email' => 'huib@creativedutchmen.com')\n\t\t\t\t \t\t);\n\t\t}\n\t\t\n\t\tpublic function getSubscribedDelegates(){\n\t\t\treturn array(\n\t\t\t\t\t\tarray(\n\t\t\t\t\t\t\t'page' => '/frontend/',\n\t\t\t\t\t\t\t'delegate' => 'FrontendPrePageResolve',\n\t\t\t\t\t\t\t'callback' => 'addPage'\n\t\t\t\t\t\t),\n\t\t\t\t\t\tarray(\n\t\t\t\t\t\t\t'page' => '/system/preferences/',\n\t\t\t\t\t\t\t'delegate' => 'AddCustomPreferenceFieldsets',\n\t\t\t\t\t\t\t'callback' => 'append_preferences'\n\t\t\t\t\t\t),\n\t\t\t\t\t\tarray(\n\t\t\t\t\t\t\t'page' => '/system/preferences/',\n\t\t\t\t\t\t\t'delegate' => 'Save',\n\t\t\t\t\t\t\t'callback' => 'save_settings'\n\t\t\t\t\t\t),\n\t\t\t);\n\t\t}\n\t\t\n\t\tpublic function addPage(&$context){\n\t\t\n\t\t\t//to prevent the callback loop\n\t\t\tif(!$this->alreadyRan){\n\t\t\t\t$this->alreadyRan = true;\n\t\t\t\t//the only way to access the current (active) pages.\n\t\t\t\t$front = FrontEnd::Page();\n\t\t\t\t\n\t\t\t\tif(!$front->resolvePage($context['page'])){\n\t\t\t\t\t//uses home page if no page is set in the config panel.\n\t\t\t\t\tif($this->_get_fallback() == ''){\n\t\t\t\t\t\t$indexPage = $this->__getIndexPage();\n\t\t\t\t\t\t$indexHandle = $indexPage['handle'];\n\t\t\t\t\t}\n\t\t\t\t\telse{\n\t\t\t\t\t\t$indexHandle = $this->_get_fallback();\n\t\t\t\t\t}\n\t\t\t\t\t\n\t\t\t\t\t//adds the home page to the handle, if the current page is not found.\n\t\t\t\t\t//requires the home page to fallback to a 404 if the params do not match, 
otherwise no 404 error will ever be created.\n\t\t\t\t\t\n\t\t\t\t\t$params = $context['page'];\n\t\t\t\t\t\n\t\t\t\t\tif($this->_Parent->Configuration->get('map_sub_to_front', 'maptofront') == 'no'){\n\t\t\t\t\t\t$tmp = substr($indexHandle,0, strrpos($indexHandle, '/'));\n\t\t\t\t\t\tif(strlen($tmp) > 0){\n\t\t\t\t\t\t\t$params = substr($context['page'], strpos($context['page'], $tmp)+strlen($tmp));\n\t\t\t\t\t\t}\n\t\t\t\t\t\telse{\n\t\t\t\t\t\t\t$params = '';\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\t$context['page'] = $indexHandle.$params;\n\t\t\t\t}\n\t\t\t}\n\t\t\t\n\t\t}\n\t\t\n\t\t\n\t\tpublic function append_preferences(&$context)\n\t\t{\n\t\t\t# Add new fieldset\n\t\t\t$group = new XMLElement('fieldset');\n\t\t\t$group->setAttribute('class', 'settings');\n\t\t\t$group->appendChild(new XMLElement('legend', 'Root Page Params'));\n\n\t\t\t# Add Site Reference field\n\t\t\t//$label = Widget::Label('Fallback page');\n\t\t\t//$label->appendChild(Widget::Input('settings[maptofront][fallback]', General::Sanitize($this->_get_fallback())));\n\t\t\t\n\t\t\t//try to add a select box for the page (more user friendly)\n\t\t\t$label = Widget::Label(__('Page to append parameters to'));\n\t\t\t\n\t\t\t$pages = $this->_Parent->Database->fetch(\"\n\t\t\t\tSELECT\n\t\t\t\t\tp.*\n\t\t\t\tFROM\n\t\t\t\t\t`tbl_pages` AS p\n\t\t\t\tWHERE\n\t\t\t\t\tp.id != '{mysql_real_escape_string($page_id)}'\n\t\t\t\tORDER BY\n\t\t\t\t\tp.title ASC\n\t\t\t\");\n\t\t\t\n\t\t\t$options = array(\n\t\t\t\tarray('', false, '')\n\t\t\t);\n\t\t\t\n\t\t\tif (is_array($pages) && !empty($pages)) {\n\t\t\t\tif (!function_exists('__compare_pages')) {\n\t\t\t\t\tfunction __compare_pages($a, $b) {\n\t\t\t\t\t\treturn strnatcasecmp($a[2], $b[2]);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t\n\t\t\t\tforeach ($pages as $page) {\n\t\t\t\t\t$options[] = array(\n\t\t\t\t\t\t$this->_Parent->resolvePagePath($page['id']), $this->_Parent->Configuration->get('fallback', 'maptofront') == 
$this->_Parent->resolvePagePath($page['id']),\n\t\t\t\t\t\t'/'.$this->_Parent->resolvePagePath($page['id'])\n\t\t\t\t\t);\n\t\t\t\t}\n\t\t\t\t\n\t\t\t\tusort($options, '__compare_pages');\n\t\t\t}\n\t\t\t\n\t\t\t$label->appendChild(Widget::Select(\n\t\t\t\t'settings[maptofront][fallback]', $options\n\t\t\t));\n\t\t\t\n\t\t\t$group->appendChild($label);\n\t\t\t$group->appendChild(new XMLElement('p', 'The page to append the parameters to. Leave empty for home (default).', array('class' => 'help')));\n\t\t\t\n\t\t\t$label = Widget::Label();\n\t\t\t$input = Widget::Input('settings[maptofront][map_sub_to_front]', 'yes', 'checkbox');\n\t\t\tif($this->_Parent->Configuration->get('map_sub_to_front', 'maptofront') == 'yes') $input->setAttribute('checked', 'checked');\n\t\t\t$label->setValue($input->generate() . ' ' . __('Map supages to home page'));\n\t\t\t\n\t\t\t$group->appendChild($label);\n\t\t\t$group->appendChild(new XMLElement('p', 'Maps subpages to the root page when checked, maps subpages to their parents if unchecked.', array('class' => 'help')));\n\t\t\t\n\t\t\t$context['wrapper']->appendChild($group);\n\t\t}\n\t\t\n\t\t//any way to get this without using the database?\n\t\tfunction __getIndexPage(){\n\t\t\t$row = $this->_Parent->Database->fetchRow(0, \"SELECT `tbl_pages`.* FROM `tbl_pages`, `tbl_pages_types` \n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t WHERE `tbl_pages_types`.page_id = `tbl_pages`.id \n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t AND tbl_pages_types.`type` = 'index' \n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t LIMIT 1\");\n\t\t\treturn $row;\n\t\t}\n\t\t\n\t\tfunction _get_fallback(){\n\t\t\t$default_fallback = '';\n\t\t\t$val = $this->_Parent->Configuration->get('fallback', 'maptofront');\n\t\t\t\n\t\t\treturn (isset($val)) ? 
$val : $default_fallback;\n\t\t}\n\t\t\n\t\tfunction save_settings($context){\n\t\t\tif(!isset($context['settings']['maptofront']['map_sub_to_front'])) $context['settings']['maptofront']['map_sub_to_front'] = 'no';\n\t\t\t\n\t\t\tif(!isset($context['settings']['maptofront'])){\n\t\t\t\t$context['settings']['maptofront'] = array('map_sub_to_front' => 'no');\n\t\t\t}\n\t\t}\n\t}\n"}, "files_after": {"extension.driver.php": " 'Root page params',\n\t\t\t\t\t\t 'version' => '1.2',\n\t\t\t\t\t\t 'release-date' => '2009-12-10',\n\t\t\t\t\t\t 'author' => array('name' => 'Huib Keemink',\n\t\t\t\t\t\t\t\t\t\t 'website' => 'http://www.creativedutchmen.com',\n\t\t\t\t\t\t\t\t\t\t 'email' => 'huib@creativedutchmen.com')\n\t\t\t\t \t\t);\n\t\t}\n\t\t\n\t\tpublic function getSubscribedDelegates(){\n\t\t\treturn array(\n\t\t\t\t\t\tarray(\n\t\t\t\t\t\t\t'page' => '/frontend/',\n\t\t\t\t\t\t\t'delegate' => 'FrontendPrePageResolve',\n\t\t\t\t\t\t\t'callback' => 'addPage'\n\t\t\t\t\t\t),\n\t\t\t\t\t\tarray(\n\t\t\t\t\t\t\t'page' => '/system/preferences/',\n\t\t\t\t\t\t\t'delegate' => 'AddCustomPreferenceFieldsets',\n\t\t\t\t\t\t\t'callback' => 'append_preferences'\n\t\t\t\t\t\t),\n\t\t\t\t\t\tarray(\n\t\t\t\t\t\t\t'page' => '/system/preferences/',\n\t\t\t\t\t\t\t'delegate' => 'Save',\n\t\t\t\t\t\t\t'callback' => 'save_settings'\n\t\t\t\t\t\t),\n\t\t\t);\n\t\t}\n\t\t\n\t\tpublic function addPage(&$context){\n\t\t\n\t\t\t//to prevent the callback loop\n\t\t\tif(!$this->alreadyRan){\n\t\t\t\t$this->alreadyRan = true;\n\t\t\t\t//the only way to access the current (active) pages.\n\t\t\t\t$front = FrontEnd::Page();\n\t\t\t\t\n\t\t\t\tif(!$front->resolvePage($context['page'])){\n\t\t\t\t\t//uses home page if no page is set in the config panel.\n\t\t\t\t\tif($this->_get_fallback() == ''){\n\t\t\t\t\t\t$indexPage = $this->__getIndexPage();\n\t\t\t\t\t\t$indexHandle = $indexPage['handle'];\n\t\t\t\t\t}\n\t\t\t\t\telse{\n\t\t\t\t\t\t$indexHandle = 
$this->_get_fallback();\n\t\t\t\t\t}\n\t\t\t\t\t\n\t\t\t\t\t//adds the home page to the handle, if the current page is not found.\n\t\t\t\t\t//requires the home page to fallback to a 404 if the params do not match, otherwise no 404 error will ever be created.\n\t\t\t\t\t\n\t\t\t\t\t$params = $context['page'];\n\t\t\t\t\t\n\t\t\t\t\tif($this->_Parent->Configuration->get('map_sub_to_front', 'maptofront') == 'no'){\n\t\t\t\t\t\t$tmp = substr($indexHandle,0, strrpos($indexHandle, '/'));\n\t\t\t\t\t\tif(strlen($tmp) > 0){\n\t\t\t\t\t\t\t$params = substr($context['page'], strpos($context['page'], $tmp)+strlen($tmp));\n\t\t\t\t\t\t}\n\t\t\t\t\t\telse{\n\t\t\t\t\t\t\t$params = '';\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\t$context['page'] = $indexHandle.$params;\n\t\t\t\t}\n\t\t\t}\n\t\t\t\n\t\t}\n\t\t\n\t\t\n\t\tpublic function append_preferences(&$context)\n\t\t{\n\t\t\t# Add new fieldset\n\t\t\t$group = new XMLElement('fieldset');\n\t\t\t$group->setAttribute('class', 'settings');\n\t\t\t$group->appendChild(new XMLElement('legend', 'Root Page Params'));\n\n\t\t\t# Add Site Reference field\n\t\t\t//$label = Widget::Label('Fallback page');\n\t\t\t//$label->appendChild(Widget::Input('settings[maptofront][fallback]', General::Sanitize($this->_get_fallback())));\n\t\t\t\n\t\t\t//try to add a select box for the page (more user friendly)\n\t\t\t$label = Widget::Label(__('Page to append parameters to'));\n\t\t\t\n\t\t\t$pages = $this->_Parent->Database->fetch(\"\n\t\t\t\tSELECT\n\t\t\t\t\tp.*\n\t\t\t\tFROM\n\t\t\t\t\t`tbl_pages` AS p\n\t\t\t\tWHERE\n\t\t\t\t\tp.id != '{mysql_real_escape_string($page_id)}'\n\t\t\t\tORDER BY\n\t\t\t\t\tp.title ASC\n\t\t\t\");\n\t\t\t\n\t\t\t$options = array(\n\t\t\t\tarray('', false, '')\n\t\t\t);\n\t\t\t\n\t\t\tif (is_array($pages) && !empty($pages)) {\n\t\t\t\tif (!function_exists('__compare_pages')) {\n\t\t\t\t\tfunction __compare_pages($a, $b) {\n\t\t\t\t\t\treturn strnatcasecmp($a[2], $b[2]);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t\n\t\t\t\tforeach 
($pages as $page) {\n\t\t\t\t\t$options[] = array(\n\t\t\t\t\t\t$this->_Parent->resolvePagePath($page['id']), $this->_Parent->Configuration->get('fallback', 'maptofront') == $this->_Parent->resolvePagePath($page['id']),\n\t\t\t\t\t\t'/'.$this->_Parent->resolvePagePath($page['id'])\n\t\t\t\t\t);\n\t\t\t\t}\n\t\t\t\t\n\t\t\t\tusort($options, '__compare_pages');\n\t\t\t}\n\t\t\t\n\t\t\t$label->appendChild(Widget::Select(\n\t\t\t\t'settings[maptofront][fallback]', $options\n\t\t\t));\n\t\t\t\n\t\t\t$group->appendChild($label);\n\t\t\t$group->appendChild(new XMLElement('p', 'The page to append the parameters to. Leave empty for home (default).', array('class' => 'help')));\n\t\t\t\n\t\t\t$label = Widget::Label();\n\t\t\t$input = Widget::Input('settings[maptofront][map_sub_to_front]', 'yes', 'checkbox');\n\t\t\tif($this->_Parent->Configuration->get('map_sub_to_front', 'maptofront') == 'yes') $input->setAttribute('checked', 'checked');\n\t\t\t$label->setValue($input->generate() . ' ' . __('Map subpages to home page'));\n\t\t\t\n\t\t\t$group->appendChild($label);\n\t\t\t$group->appendChild(new XMLElement('p', 'Maps subpages to the root page when checked, maps subpages to their parents if unchecked.', array('class' => 'help')));\n\t\t\t\n\t\t\t$context['wrapper']->appendChild($group);\n\t\t}\n\t\t\n\t\t//any way to get this without using the database?\n\t\tfunction __getIndexPage(){\n\t\t\t$row = $this->_Parent->Database->fetchRow(0, \"SELECT `tbl_pages`.* FROM `tbl_pages`, `tbl_pages_types` \n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t WHERE `tbl_pages_types`.page_id = `tbl_pages`.id \n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t AND tbl_pages_types.`type` = 'index' \n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t LIMIT 1\");\n\t\t\treturn $row;\n\t\t}\n\t\t\n\t\tfunction _get_fallback(){\n\t\t\t$default_fallback = '';\n\t\t\t$val = $this->_Parent->Configuration->get('fallback', 'maptofront');\n\t\t\t\n\t\t\treturn (isset($val)) ? 
$val : $default_fallback;\n\t\t}\n\t\t\n\t\tfunction save_settings($context){\n\t\t\tif(!isset($context['settings']['maptofront']['map_sub_to_front'])) $context['settings']['maptofront']['map_sub_to_front'] = 'no';\n\t\t\t\n\t\t\tif(!isset($context['settings']['maptofront'])){\n\t\t\t\t$context['settings']['maptofront'] = array('map_sub_to_front' => 'no');\n\t\t\t}\n\t\t}\n\t}\n"}}
-{"repo": "fgrehm/qmlunit", "pr_number": 1, "title": "Mergebranch", "state": "closed", "merged_at": null, "additions": 106, "deletions": 7, "files_changed": ["qmlunittestrunner.cpp", "scripts/QUnitTestXmlLogger.js"], "files_before": {"qmlunittestrunner.cpp": "#include \"qmlunittestrunner.h\"\n#include \"qmllogger.h\"\n#include \n#include \n#include \n\nQmlUnitTestRunner::QmlUnitTestRunner(QApplication *app) :\n QObject(app)\n{\n this->app = app;\n\n QStringList args = app->arguments();\n // Skips executable\n args.removeAt(0);\n\n if (args.count() == 0) {\n qDebug() << \"No arguments passed, failing back to qmlunit Test Suite (\" << (app->applicationDirPath() + \"/test\") << \")\\n\";\n args = QStringList(app->applicationDirPath() + \"/test\");\n }\n\n QStringListIterator i(args);\n while(i.hasNext())\n findTests(i.next());\n\n i = QStringListIterator(tests);\n qDebug() << \"Tests files found:\";\n while(i.hasNext()) {\n QString currentArg = i.next();\n qDebug() << \"\\t\" << currentArg;\n }\n}\n\nvoid QmlUnitTestRunner::setup(){\n QmlLogger *logger = new QmlLogger(app->applicationDirPath(), this);\n\n QDeclarativeEngine *engine = logger->engine();\n\n engine->setOfflineStoragePath(QDir::currentPath() + \"/storage\");\n engine->addImportPath(QDir::currentPath() + \"/QmlUnit\");\n engine->rootContext()->setContextProperty(\"testsInput\", tests);\n engine->rootContext()->setContextProperty(\"currentPath\", QDir::currentPath());\n\n logger->setup();\n}\n\nint QmlUnitTestRunner::exec() {\n setup();\n return app->exec();\n}\n\nvoid QmlUnitTestRunner::findTests(QString path) {\n if (isTest(path)) {\n tests << QDir(path).absolutePath();\n return;\n }\n\n QStringList filters; filters << \"*\";\n QDir dir = QDir(QDir(path).absolutePath());\n\n QListIterator files(dir.entryInfoList(filters, QDir::AllEntries | QDir::NoDotAndDotDot));\n while(files.hasNext()) {\n QFileInfo file = files.next();\n if (file.fileName() == \".\" || file.fileName() == \"..\") continue;\n\n if 
(isTest(file))\n tests << file.absoluteFilePath();\n else if (isDir(file))\n findTests(file.absoluteFilePath());\n }\n}\n\nbool QmlUnitTestRunner::isTest(QFileInfo file){\n return isTest(file.fileName());\n}\n\nbool QmlUnitTestRunner::isTest(QString filePath){\n return filePath.endsWith(\"Test.qml\");\n}\n\nbool QmlUnitTestRunner::isDir(QFileInfo file){\n return QDir(file.absoluteFilePath()).exists();\n}\n"}, "files_after": {"qmlunittestrunner.cpp": "#include \"qmlunittestrunner.h\"\n#include \"qmllogger.h\"\n#include \n#include \n#include \n\nQmlUnitTestRunner::QmlUnitTestRunner(QApplication *app) :\n QObject(app)\n{\n this->app = app;\n\n QStringList args = app->arguments();\n // Skips executable\n args.removeAt(0);\n \n // Parse possible xmlUnitOutput arg\n bool xmlOutput = false;\n \n if ((args.count() > 0) && args.contains(\"-xml\")) {\n xmlOutput = true;\n args.removeAt(args.indexOf(\"-xml\"));\n }\n \n if (args.count() == 0) {\n // Surpress all non-xml output if xml output was requested\n if (!xmlOutput) \n qDebug() << \"No arguments passed, failing back to qmlunit Test Suite (\" << (app->applicationDirPath() + \"/test\") << \")\\n\";\n args = QStringList(app->applicationDirPath() + \"/test\");\n }\n\n QStringListIterator i(args);\n while(i.hasNext())\n findTests(i.next());\n\n i = QStringListIterator(tests);\n \n // Surpress all non-xml output if xml output was requested\n if (!xmlOutput) {\n qDebug() << \"Tests files found:\";\n while(i.hasNext()) {\n QString currentArg = i.next();\n qDebug() << \"\\t\" << currentArg;\n }\n }\n}\n\nvoid QmlUnitTestRunner::setup(){\n QmlLogger *logger = new QmlLogger(app->applicationDirPath(), this);\n\n QDeclarativeEngine *engine = logger->engine();\n\n engine->setOfflineStoragePath(QDir::currentPath() + \"/storage\");\n engine->addImportPath(QDir::currentPath() + \"/QmlUnit\");\n engine->rootContext()->setContextProperty(\"testsInput\", tests);\n engine->rootContext()->setContextProperty(\"currentPath\", 
QDir::currentPath());\n\n logger->setup();\n}\n\nint QmlUnitTestRunner::exec() {\n setup();\n return app->exec();\n}\n\nvoid QmlUnitTestRunner::findTests(QString path) {\n if (isTest(path)) {\n tests << QDir(path).absolutePath();\n return;\n }\n\n QStringList filters; filters << \"*\";\n QDir dir = QDir(QDir(path).absolutePath());\n\n QListIterator files(dir.entryInfoList(filters, QDir::AllEntries | QDir::NoDotAndDotDot));\n while(files.hasNext()) {\n QFileInfo file = files.next();\n if (file.fileName() == \".\" || file.fileName() == \"..\") continue;\n\n if (isTest(file))\n tests << file.absoluteFilePath();\n else if (isDir(file))\n findTests(file.absoluteFilePath());\n }\n}\n\nbool QmlUnitTestRunner::isTest(QFileInfo file){\n return isTest(file.fileName());\n}\n\nbool QmlUnitTestRunner::isTest(QString filePath){\n return filePath.endsWith(\"Test.qml\");\n}\n\nbool QmlUnitTestRunner::isDir(QFileInfo file){\n return QDir(file.absoluteFilePath()).exists();\n}\n", "scripts/QUnitTestXmlLogger.js": "var currentTestSuite;\nvar testSuites = []; \n\nfunction addTestSuite(name) {\n\n currentTestSuite = {\n name: name,\n errors: 0,\n testsTotal: 0,\n time: 0.0, \n failures: 0,\n tests: []\n };\n testSuites.push(currentTestSuite)\n}\n\nfunction parseFailureMessage(assertions) {\n var message = \"\"\n for (var i=0; i\")\n logger.log(\"\")\n \n testSuites.forEach(function(testSuite) {\n logger.log(\" \")\n \n testSuite.tests.forEach(function(test) {\n if (test.failures == 0) { \n logger.log(\" \")\n } \n else {\n logger.log(\" \")\n logger.log(\" \")\n logger.log(\" \")\n } \n });\n \n logger.log(\" \")\n });\n \n logger.log(\"\")\n}\n\n\n"}}
-{"repo": "SunboX/mootools-fx-text", "pr_number": 1, "title": "fx-text for the pack", "state": "closed", "merged_at": "2010-10-02T17:41:23Z", "additions": 82, "deletions": 55, "files_changed": ["Source/Element.retype.js", "Source/Fx.Text.js"], "files_before": {"Source/Element.retype.js": "/*\n---\n \nname: Element.retype\n \ndescription: Effect to animated replace the text of an element.\n\nauthors: Dipl.-Ing. (FH) Andr\u00e9 Fiedler \n\ncopyright: Copyright (c) 2010 Dipl.-Ing. (FH) Andr\u00e9 Fiedler \n \nlicense: MIT-style license.\n\nversion: 1.0\n \nrequires: Fx.Text\n \nprovides: Element.retype\n \n...\n*/\n\nElement.Properties.retype = {\r\n\r\n set: function(options){\r\n var retype = this.retrieve('retype');\r\n if (retype) \r\n retype.cancel();\r\n return this.eliminate('retype').store('retype:options', $extend({\r\n link: 'cancel'\r\n }, options));\r\n },\r\n \r\n get: function(options){\r\n if (options || !this.retrieve('retype')) {\r\n if (options || !this.retrieve('retype:options')) \r\n this.set('retype', options);\r\n this.store('retype', new Fx.Text(this, this.retrieve('retype:options')));\r\n }\r\n return this.retrieve('retype');\r\n } \r\n};\r\n\r\nElement.implement({\r\n\r\n retype: function(from, to){\r\n this.get('retype').start(from, to);\r\n return this;\r\n }\r\n});", "Source/Fx.Text.js": "/*\n---\n \nname: Fx.Text\n \ndescription: Effect to animated replace the text of an element.\n\nauthors: Dipl.-Ing. (FH) Andr\u00e9 Fiedler \n\ncopyright: Copyright (c) 2010 Dipl.-Ing. 
(FH) Andr\u00e9 Fiedler \n \nlicense: MIT-style license.\n\nversion: 1.2.1\n \nrequires: Core/1.2.4: Fx\n \nprovides: Fx.Text\n \n...\n*/\n\nFx.Text = new Class({\r\n\r\n Extends: Fx,\r\n \r\n initialize: function(element, options){\r\n this.element = this.subject = document.id(element);\r\n this.parent(options);\r\n },\r\n \r\n set: function(now){\r\n this.element.set('text', now);\r\n return this;\r\n },\r\n \r\n step: function(){\r\n if (!this.to) {\r\n this.to = this.from;\r\n this.from = this.element.get('text', '');\r\n }\r\n return this.parent();\r\n },\r\n \r\n compute: function(from, to, delta){\r\n var l = Math.round(to.length * delta);\r\n var r = Math.round((from.length - to.length) * delta);\r\n return to.substr(0, l) + from.substr(l, from.length - l - r);\r\n }\r\n});\n"}, "files_after": {"Source/Element.retype.js": "/*\n---\n \nname: Element.retype\n \ndescription: Effect to animated replace the text of an element.\n\nauthors: Dipl.-Ing. (FH) Andr\u00e9 Fiedler \n\ncopyright: Copyright (c) 2010 Dipl.-Ing. 
(FH) Andr\u00e9 Fiedler \n \nlicense: MIT-style license.\n\nversion: 1.0\n \nrequires:\n - Fx.Text\n \nprovides: Element.retype\n \n...\n*/\n\nElement.Properties.retype = {\r\n\r\n\tset: function(options){\r\n\t\tvar retype = this.retrieve('retype');\r\n\t\tif (retype)\r\n\t\t\tretype.cancel();\r\n\t\treturn this.eliminate('retype').store('retype:options', $extend({\r\n\t\t\tlink: 'cancel'\r\n\t\t}, options));\r\n\t},\r\n\t\r\n\tget: function(options){\r\n\t\tif (options || !this.retrieve('retype')){\r\n\t\t\tif (options || !this.retrieve('retype:options'))\r\n\t\t\t\tthis.set('retype', options);\r\n\t\t\tthis.store('retype', new Fx.Text(this, this.retrieve('retype:options')));\r\n\t\t}\r\n\t\treturn this.retrieve('retype');\r\n\t}\n\t\r\n};\r\n\r\nElement.implement({\r\n\r\n\tretype: function(from, to){\r\n\t\tthis.get('retype').start(from, to);\r\n\t\treturn this;\r\n\t}\n\t\r\n});\n", "Source/Fx.Text.js": "/*\n---\n \nname: Fx.Text\n \ndescription: Effect to animated replace the text of an element.\n\nauthors: Dipl.-Ing. (FH) Andr\u00e9 Fiedler \n\ncopyright: Copyright (c) 2010 Dipl.-Ing. (FH) Andr\u00e9 Fiedler \n \nlicense: MIT-style license.\n\nversion: 1.2.1\n \nrequires: \n - Core/Element\n - Core/Fx\n \nprovides: Fx.Text\n \n...\n*/\n\nFx.Text = new Class({\r\n\r\n\tExtends: Fx,\r\n\t\r\n\tinitialize: function(element, options){\r\n\t\tthis.element = this.subject = document.id(element);\r\n\t\tthis.parent(options);\r\n\t},\r\n\t\r\n\tset: function(now){\r\n\t\tthis.element.set('text', now);\r\n\t\treturn this;\r\n\t},\r\n\t\r\n\tstep: function(){\r\n\t\tif (!this.to){\r\n\t\t\tthis.to = this.from;\r\n\t\t\tthis.from = this.element.get('text', '');\r\n\t\t}\r\n\t\treturn this.parent();\r\n\t},\r\n\t\r\n\tcompute: function(from, to, delta){\r\n\t\tvar l = Math.round(to.length * delta),\n\t\t\tr = Math.round((from.length - to.length) * delta);\r\n\t\treturn to.substr(0, l) + from.substr(l, from.length - l - r);\r\n\t}\n\t\r\n});\n"}}
-{"repo": "seam/servlet", "pr_number": 5, "title": "SEAMSERVLET-32", "state": "closed", "merged_at": "2011-03-25T05:19:21Z", "additions": 12, "deletions": 11, "files_changed": ["impl/src/main/java/org/jboss/seam/servlet/event/ImplicitServletObjectsHolder.java"], "files_before": {"impl/src/main/java/org/jboss/seam/servlet/event/ImplicitServletObjectsHolder.java": "/*\n * JBoss, Home of Professional Open Source\n * Copyright 2010, Red Hat Middleware LLC, and individual contributors\n * by the @authors tag. See the copyright.txt in the distribution for a\n * full listing of individual contributors.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * http://www.apache.org/licenses/LICENSE-2.0\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.jboss.seam.servlet.event;\n\nimport javax.enterprise.context.ApplicationScoped;\nimport javax.enterprise.event.Observes;\nimport javax.enterprise.inject.spi.BeanManager;\nimport javax.inject.Inject;\nimport javax.servlet.ServletContext;\nimport javax.servlet.ServletRequest;\nimport javax.servlet.ServletResponse;\nimport javax.servlet.http.HttpServletRequest;\nimport javax.servlet.http.HttpServletResponse;\nimport javax.servlet.http.HttpSession;\n\nimport org.jboss.seam.servlet.ServletRequestContext;\nimport org.jboss.seam.servlet.beanManager.ServletContextAttributeProvider;\nimport org.jboss.seam.servlet.http.HttpServletRequestContext;\nimport org.jboss.seam.servlet.support.ServletLogger;\nimport org.jboss.seam.solder.logging.Category;\n\n/**\n * A manager for tracking the contextual Servlet objects, 
specifically the {@link ServletContext}, {@link HttpServletRequest}\n * and {@link HttpServletResponse}.\n * \n * @author Dan Allen\n */\n@ApplicationScoped\npublic class ImplicitServletObjectsHolder {\n @Inject @Category(ServletLogger.CATEGORY)\n private ServletLogger log;\n\n private ServletContext servletCtx;\n\n private final ThreadLocal requestCtx = new InheritableThreadLocal() {\n @Override\n protected ServletRequestContext initialValue() {\n return null;\n }\n };\n\n protected void contextInitialized(@Observes @Initialized final InternalServletContextEvent e, BeanManager beanManager) {\n ServletContext ctx = e.getServletContext();\n log.servletContextInitialized(ctx);\n ctx.setAttribute(BeanManager.class.getName(), beanManager);\n ServletContextAttributeProvider.setServletContext(ctx);\n servletCtx = ctx;\n }\n\n protected void contextDestroyed(@Observes @Destroyed final InternalServletContextEvent e) {\n log.servletContextDestroyed(e.getServletContext());\n servletCtx = null;\n }\n\n protected void requestInitialized(@Observes @Initialized final InternalServletRequestEvent e) {\n ServletRequest req = e.getServletRequest();\n log.servletRequestInitialized(req);\n if (req instanceof HttpServletRequest) {\n requestCtx.set(new HttpServletRequestContext(req));\n } else {\n requestCtx.set(new ServletRequestContext(req));\n }\n }\n\n protected void requestDestroyed(@Observes @Destroyed final InternalServletRequestEvent e) {\n log.servletRequestDestroyed(e.getServletRequest());\n requestCtx.set(null);\n }\n\n protected void responseInitialized(@Observes @Initialized final InternalServletResponseEvent e) {\n ServletResponse res = e.getServletResponse();\n log.servletResponseInitialized(res);\n if (res instanceof HttpServletResponse) {\n requestCtx.set(new HttpServletRequestContext(requestCtx.get().getRequest(), res));\n } else {\n requestCtx.set(new ServletRequestContext(requestCtx.get().getRequest(), res));\n }\n }\n\n protected void responseDestroyed(@Observes 
@Destroyed final InternalServletResponseEvent e) {\n log.servletResponseDestroyed(e.getServletResponse());\n if (requestCtx.get() instanceof HttpServletRequestContext) {\n requestCtx.set(new HttpServletRequestContext(requestCtx.get().getRequest()));\n } else {\n requestCtx.set(new ServletRequestContext(requestCtx.get().getRequest()));\n }\n }\n\n public ServletContext getServletContext() {\n return servletCtx;\n }\n\n public ServletRequestContext getServletRequestContext() {\n return requestCtx.get();\n }\n\n public HttpServletRequestContext getHttpServletRequestContext() {\n if (requestCtx.get() instanceof HttpServletRequestContext) {\n return HttpServletRequestContext.class.cast(requestCtx.get());\n } else {\n return null;\n }\n }\n\n public ServletRequest getServletRequest() {\n if (requestCtx.get() != null) {\n return requestCtx.get().getRequest();\n } else {\n return null;\n }\n }\n\n public HttpServletRequest getHttpServletRequest() {\n if (requestCtx.get() instanceof HttpServletRequestContext) {\n return HttpServletRequestContext.class.cast(requestCtx.get()).getRequest();\n } else {\n return null;\n }\n }\n\n public ServletResponse getServletResponse() {\n if (requestCtx.get() != null) {\n return requestCtx.get().getResponse();\n } else {\n return null;\n }\n }\n\n public HttpServletResponse getHttpServletResponse() {\n if (requestCtx.get() instanceof HttpServletRequestContext) {\n return HttpServletRequestContext.class.cast(requestCtx.get()).getResponse();\n } else {\n return null;\n }\n }\n\n public HttpSession getHttpSession() {\n if (requestCtx.get() instanceof HttpServletRequestContext) {\n return HttpServletRequestContext.class.cast(requestCtx.get()).getRequest().getSession();\n } else {\n return null;\n }\n }\n\n static class InternalServletContextEvent {\n private ServletContext ctx;\n\n InternalServletContextEvent(ServletContext ctx) {\n this.ctx = ctx;\n }\n\n public ServletContext getServletContext() {\n return ctx;\n }\n }\n\n static class 
InternalServletRequestEvent {\n private ServletRequest request;\n\n InternalServletRequestEvent(ServletRequest request) {\n this.request = request;\n }\n\n public ServletRequest getServletRequest() {\n return request;\n }\n }\n\n static class InternalServletResponseEvent {\n private ServletResponse response;\n\n InternalServletResponseEvent(ServletResponse response) {\n this.response = response;\n }\n\n public ServletResponse getServletResponse() {\n return response;\n }\n }\n\n static class InternalHttpSessionEvent {\n private HttpSession session;\n\n InternalHttpSessionEvent(HttpSession session) {\n this.session = session;\n }\n\n public HttpSession getHttpSession() {\n return session;\n }\n }\n}\n"}, "files_after": {"impl/src/main/java/org/jboss/seam/servlet/event/ImplicitServletObjectsHolder.java": "/*\n * JBoss, Home of Professional Open Source\n * Copyright 2010, Red Hat Middleware LLC, and individual contributors\n * by the @authors tag. See the copyright.txt in the distribution for a\n * full listing of individual contributors.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * http://www.apache.org/licenses/LICENSE-2.0\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.jboss.seam.servlet.event;\n\nimport javax.enterprise.context.ApplicationScoped;\nimport javax.enterprise.event.Observes;\nimport javax.enterprise.inject.spi.BeanManager;\nimport javax.inject.Inject;\nimport javax.servlet.ServletContext;\nimport javax.servlet.ServletRequest;\nimport javax.servlet.ServletResponse;\nimport 
javax.servlet.http.HttpServletRequest;\nimport javax.servlet.http.HttpServletResponse;\nimport javax.servlet.http.HttpSession;\n\nimport org.jboss.seam.servlet.ServletRequestContext;\nimport org.jboss.seam.servlet.beanManager.ServletContextAttributeProvider;\nimport org.jboss.seam.servlet.http.HttpServletRequestContext;\nimport org.jboss.seam.servlet.support.ServletLogger;\nimport org.jboss.seam.solder.logging.Category;\n\n/**\n * A manager for tracking the contextual Servlet objects, specifically the {@link ServletContext}, {@link HttpServletRequest}\n * and {@link HttpServletResponse}.\n * \n * @author Dan Allen\n */\n@ApplicationScoped\npublic class ImplicitServletObjectsHolder {\n @Inject @Category(ServletLogger.CATEGORY)\n private ServletLogger log;\n\n private ServletContext servletCtx;\n\n private final ThreadLocal requestCtx = new ThreadLocal() {\n @Override\n protected ServletRequestContext initialValue() {\n return null;\n }\n };\n\n protected void contextInitialized(@Observes @Initialized final InternalServletContextEvent e, BeanManager beanManager) {\n ServletContext ctx = e.getServletContext();\n log.servletContextInitialized(ctx);\n ctx.setAttribute(BeanManager.class.getName(), beanManager);\n ServletContextAttributeProvider.setServletContext(ctx);\n servletCtx = ctx;\n }\n\n protected void contextDestroyed(@Observes @Destroyed final InternalServletContextEvent e) {\n log.servletContextDestroyed(e.getServletContext());\n servletCtx = null;\n }\n\n protected void requestInitialized(@Observes @Initialized final InternalServletRequestEvent e) {\n ServletRequest req = e.getServletRequest();\n log.servletRequestInitialized(req);\n if (req instanceof HttpServletRequest) {\n requestCtx.set(new HttpServletRequestContext(req));\n } else {\n requestCtx.set(new ServletRequestContext(req));\n }\n }\n\n protected void requestDestroyed(@Observes @Destroyed final InternalServletRequestEvent e) {\n log.servletRequestDestroyed(e.getServletRequest());\n 
requestCtx.set(null);\n }\n\n protected void responseInitialized(@Observes @Initialized final InternalServletResponseEvent e) {\n ServletResponse res = e.getServletResponse();\n log.servletResponseInitialized(res);\n if (res instanceof HttpServletResponse) {\n requestCtx.set(new HttpServletRequestContext(requestCtx.get().getRequest(), res));\n } else {\n requestCtx.set(new ServletRequestContext(requestCtx.get().getRequest(), res));\n }\n }\n\n protected void responseDestroyed(@Observes @Destroyed final InternalServletResponseEvent e) {\n log.servletResponseDestroyed(e.getServletResponse());\n if (requestCtx.get() instanceof HttpServletRequestContext) {\n requestCtx.set(new HttpServletRequestContext(requestCtx.get().getRequest()));\n } else {\n requestCtx.set(new ServletRequestContext(requestCtx.get().getRequest()));\n }\n }\n\n public ServletContext getServletContext() {\n return servletCtx;\n }\n\n public ServletRequestContext getServletRequestContext() {\n return requestCtx.get();\n }\n\n public HttpServletRequestContext getHttpServletRequestContext() {\n if (requestCtx.get() instanceof HttpServletRequestContext) {\n return HttpServletRequestContext.class.cast(requestCtx.get());\n } else {\n return null;\n }\n }\n\n public ServletRequest getServletRequest() {\n if (requestCtx.get() != null) {\n return requestCtx.get().getRequest();\n } else {\n return null;\n }\n }\n\n public HttpServletRequest getHttpServletRequest() {\n if (requestCtx.get() instanceof HttpServletRequestContext) {\n return HttpServletRequestContext.class.cast(requestCtx.get()).getRequest();\n } else {\n return null;\n }\n }\n\n public ServletResponse getServletResponse() {\n if (requestCtx.get() != null) {\n return requestCtx.get().getResponse();\n } else {\n return null;\n }\n }\n\n public HttpServletResponse getHttpServletResponse() {\n if (requestCtx.get() instanceof HttpServletRequestContext) {\n return HttpServletRequestContext.class.cast(requestCtx.get()).getResponse();\n } else {\n return 
null;\n }\n }\n\n public HttpSession getHttpSession() {\n if (requestCtx.get() instanceof HttpServletRequestContext) {\n return HttpServletRequestContext.class.cast(requestCtx.get()).getRequest().getSession();\n } else {\n return null;\n }\n }\n\n static class InternalServletContextEvent {\n private ServletContext ctx;\n\n InternalServletContextEvent(ServletContext ctx) {\n this.ctx = ctx;\n }\n\n public ServletContext getServletContext() {\n return ctx;\n }\n }\n\n static class InternalServletRequestEvent {\n private ServletRequest request;\n\n InternalServletRequestEvent(ServletRequest request) {\n this.request = request;\n }\n\n public ServletRequest getServletRequest() {\n return request;\n }\n }\n\n static class InternalServletResponseEvent {\n private ServletResponse response;\n\n InternalServletResponseEvent(ServletResponse response) {\n this.response = response;\n }\n\n public ServletResponse getServletResponse() {\n return response;\n }\n }\n\n static class InternalHttpSessionEvent {\n private HttpSession session;\n\n InternalHttpSessionEvent(HttpSession session) {\n this.session = session;\n }\n\n public HttpSession getHttpSession() {\n return session;\n }\n }\n}\n"}}
-{"repo": "otrtool/otrtool", "pr_number": 12, "title": "Logfilemode must not be set if guimode is enabled.", "state": "closed", "merged_at": "2016-06-05T10:16:17Z", "additions": 1, "deletions": 1, "files_changed": ["src/main.c"], "files_before": {"src/main.c": "#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\n#include \n\n#include \n#include \"md5.h\"\n\n#include \n#include \n\n#define ERROR(...) \\\n ({fprintf(stderr, \"\\n\"); \\\n fprintf(stderr, __VA_ARGS__); \\\n fprintf(stderr, \"\\n\"); \\\n exit(EXIT_FAILURE); })\n\n#define PERROR(...) \\\n ({fprintf(stderr, \"\\n\"); \\\n fprintf(stderr, __VA_ARGS__); \\\n fprintf(stderr, \": \"); \\\n perror(NULL); \\\n exit(EXIT_FAILURE); })\n\n#define MIN(a,b) ((a)<(b)?(a):(b))\n\n#ifndef VERSION\n #define VERSION \"version unknown\"\n#endif\n\n#define LINE_LENGTH 80\n#define MAX_RESPONSE_LENGTH 1000\n#define CREAT_MODE S_IWUSR|S_IRUSR|S_IRGRP|S_IROTH\n\n#define VERB_INFO 1\n#define VERB_DEBUG 2\n\n#define ACTION_INFO 1\n#define ACTION_FETCHKEY 2\n#define ACTION_DECRYPT 3\n#define ACTION_VERIFY 4\n\n/* global options as supplied by the user via command-line etc. 
*/\nstruct otrtool_options {\n int action;\n int verbosity;\n int guimode; // do not output \\r and stuff\n int unlinkmode;\n char *email;\n char *password;\n char *keyphrase;\n char *destdir;\n char *destfile;\n};\nstatic struct otrtool_options opts = {\n .action = ACTION_INFO,\n .verbosity = VERB_INFO,\n .guimode = 0,\n .unlinkmode = 0,\n .email = NULL,\n .password = NULL,\n .keyphrase = NULL,\n .destdir = NULL,\n .destfile = NULL,\n};\n\nstatic int interactive = 1; // ask questions instead of exiting\nstatic int logfilemode = 0; // do not output progress bar\n\nstatic char *email = NULL;\nstatic char *password = NULL;\nstatic char *keyphrase = NULL;\nstatic char *filename = NULL;\nstatic char *destfilename = NULL;\n\nstatic FILE *file = NULL;\nstatic FILE *keyfile = NULL;\nstatic FILE *ttyfile = NULL;\nstatic char *header = NULL;\nstatic char *info = NULL;\n\n// ######################## curl-stuff #######################\n\nstruct MemoryStruct {\n char *memory;\n size_t size;\n};\n\nstatic size_t WriteMemoryCallback(void *ptr, size_t size,\n size_t nmemb, void *data) {\n size_t realsize = size * nmemb;\n struct MemoryStruct *mem = (struct MemoryStruct *)data;\n char *newmem;\n \n // abort very long transfers\n if (mem->size + realsize > MAX_RESPONSE_LENGTH) {\n realsize = mem->size <= MAX_RESPONSE_LENGTH\n ? 
MAX_RESPONSE_LENGTH - mem->size\n : 0;\n }\n if (realsize < 1) return 0;\n \n // \"If realloc() fails the original block is left untouched\" (man 3 realloc)\n newmem = realloc(mem->memory, mem->size + realsize);\n if (newmem != NULL) {\n mem->memory = newmem;\n memcpy(&(mem->memory[mem->size]), ptr, realsize);\n mem->size += realsize;\n } else return 0;\n return realsize;\n}\n\n// ######################## generic functions ####################\n\nchar * bin2hex(void *data_, int len) {\n unsigned char *data = data_;\n unsigned char *result = malloc(sizeof(char) * len * 2 + 1);\n result[len * 2] = 0;\n int foo;\n for (len-- ; len >= 0 ; len--) {\n foo = data[len] % 16;\n result[len*2 + 1] = foo > 9 ? 0x37 + foo : 0x30 + foo;\n foo = data[len] >> 4;\n result[len*2] = foo > 9 ? 0x37 + foo : 0x30 + foo;\n }\n return (char*)result;\n}\n\nvoid * hex2bin(char *data_) {\n int len = strlen(data_) / 2;\n unsigned char *data = (unsigned char*)data_;\n // never tested with lowercase letters!\n unsigned char *result = malloc(sizeof(char) * len + 1);\n int foo, bar;\n result[len] = 0;\n for (len-- ; len >= 0 ; len--) {\n foo = data[len*2];\n if (foo < 0x41) {\n // is a digit\n bar = foo - 0x30;\n } else if (foo < 0x61) {\n // is a uppercase letter\n bar = foo - 0x37;\n } else {\n // is a lowercase letter\n bar = foo - 0x57;\n }\n result[len] = bar << 4;\n \n foo = data[len*2 + 1];\n if (foo < 0x41) {\n // is a digit\n bar = foo - 0x30;\n } else if (foo < 0x61) {\n // is a uppercase letter\n bar = foo - 0x37;\n } else {\n // is a lowercase letter\n bar = foo - 0x57;\n }\n result[len] += bar;\n }\n return (void*)result;\n}\n\n// C does not support binary constants, but gcc >= 4.3 does.\n// Because we can't really expect people to update their compilers in four\n// years (4.3 is from march 2008), the following defines will substitute\n// the three values used by base64Encode with their decimal equivalent.\n#define B_11 3\n#define B_1111 15\n#define B_111111 63\nchar * 
base64Encode(void *data_, int len) {\n unsigned char *data = data_;\n static const char *b64 = \"\\\nABCDEFGHIJKLMNOPQRSTUVWXYZ\\\nabcdefghijklmnopqrstuvwxyz\\\n0123456789+/\";\n int blocks = (len + 2) / 3;\n int newlen = blocks * 4 + 1;\n char *result = malloc(newlen);\n char *resptr = result;\n int i;\n \n for (i = len / 3 ; i > 0 ; i--) {\n resptr[0] = b64[ data[0] >> 2 ];\n resptr[1] = b64[ (data[0] & B_11) << 4\n | data[1] >> 4 ];\n resptr[2] = b64[ (data[1] & B_1111) << 2\n | data[2] >> 6 ];\n resptr[3] = b64[ data[2] & B_111111 ];\n resptr += 4;\n data += 3;\n }\n \n if (len < blocks * 3 - 1) {\n resptr[0] = b64[ data[0] >> 2 ];\n resptr[1] = b64[ (data[0] & B_11) << 4 ];\n resptr[2] = '=';\n resptr[3] = '=';\n resptr += 4;\n } else if (len < blocks * 3) {\n resptr[0] = b64[ data[0] >> 2 ];\n resptr[1] = b64[ (data[0] & B_11) << 4\n | data[1] >> 4 ];\n resptr[2] = b64[ (data[1] & B_1111) << 2 ];\n resptr[3] = '=';\n resptr += 4;\n }\n \n *resptr = 0;\n return result;\n}\n\nvoid * base64Decode(char *text, int *outlen) {\n static const unsigned char b64dec[] = {\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //00\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //10\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 62, 0, 0, 0, 63, //20\n 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 0, 0, 0, 0, 0, 0, //30\n 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, //40\n 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 0, 0, 0, 0, 0, //50\n 0, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, //60\n 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 0, 0, 0, 0, 0, //70\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //80\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //90\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //a0\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //b0\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //c0\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //d0\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //e0\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0 //f0\n };\n // this functions treats invalid characters as 'A'. deal with it :-P\n int inlen = (strlen(text) >> 2) << 2;\n int blocks = inlen >> 2;\n *outlen = blocks * 3 - (text[inlen-2] == '='\n ? 2 : (text[inlen-1] == '=' ? 1 : 0));\n char *result = malloc(blocks * 3);\n char *resptr = result;\n u_int8_t *text_ = (u_int8_t*)text;\n int i;\n \n for (i = 0 ; i < blocks ; i++) {\n resptr[0] = b64dec[text_[0]] << 2 | b64dec[text_[1]] >> 4;\n resptr[1] = b64dec[text_[1]] << 4 | b64dec[text_[2]] >> 2;\n resptr[2] = b64dec[text_[2]] << 6 | b64dec[text_[3]];\n \n text_ += 4;\n resptr += 3;\n }\n \n return (void*)result;\n}\n\nint isBase64(char *text) {\n static const char *b64 = \"\\\nABCDEFGHIJKLMNOPQRSTUVWXYZ\\\nabcdefghijklmnopqrstuvwxyz\\\n0123456789+/=\";\n return strlen(text) == strspn(text, b64);\n}\n\nchar * queryGetParam(char *query, char *name) {\n char *begin = index(query, '&');\n char *end;\n int nameLen = strlen(name);\n \n while (begin != NULL) {\n begin++;\n if (strncmp(begin, name, nameLen) == 0 && begin[nameLen] == '=') {\n begin += nameLen + 1;\n end = index(begin, '&');\n if (end == NULL)\n end = begin + strlen(begin);\n char *result = malloc(end - begin + 1);\n strncpy(result, begin, end - begin);\n result[end - begin] = 0;\n return result;\n }\n begin = index(begin, '&');\n }\n return NULL;\n}\n\nvoid quote(char *message) {\n char line[LINE_LENGTH + 1];\n line[0] = '>';\n line[1] = ' ';\n int index = 2;\n \n while (*message != 0) {\n if (*message < 0x20 || *message > 0x7E) {\n line[index++] = ' ';\n } else {\n line[index++] = *message;\n }\n if (index == LINE_LENGTH) {\n line[index++] = '\\n';\n fwrite(line, index, 1, stderr);\n line[0] = '>';\n line[1] = ' ';\n index = 2;\n }\n message++;\n }\n line[index++] = '\\n';\n if (index != 3) fwrite(line, index, 1, stderr);\n}\n\nvoid dumpQuerystring(char *query) {\n int length = strlen(query);\n char line[LINE_LENGTH + 1];\n int index = 0;\n \n if (*query == '&') {\n line[0] = '&';\n 
index++;\n query++;\n }\n \n for (; length > 0 ; length --) {\n if (*query == '&') {\n line[index] = '\\n';\n fwrite(line, index + 1, 1, stderr);\n index = 0;\n }\n line[index] = *query;\n \n index++;\n if (index == LINE_LENGTH) {\n line[index] = '\\n';\n fwrite(line, index + 1, 1, stderr);\n line[0] = ' ';\n index = 1;\n }\n query++;\n }\n line[index] = '\\n';\n if (index != LINE_LENGTH) fwrite(line, index + 1, 1, stderr);\n}\n\nvoid dumpHex(void *data_, int len) {\n unsigned char *data = data_;\n unsigned char *line = malloc(sizeof(char) * LINE_LENGTH + 1);\n char *hexrep_orig = bin2hex(data, len);\n char *hexrep = hexrep_orig;\n int i, pos;\n \n for (pos = 0 ; pos < len ; pos += 16) {\n for (i = 0 ; i < 8 ; i++) {\n line[i*3] = pos+i < len ? hexrep[i*2] : ' ';\n line[i*3+1] = pos+i < len ? hexrep[i*2+1] : ' ';\n line[i*3+2] = ' ';\n }\n line[24] = ' ';\n for (i = 8 ; i < 16 ; i++) {\n line[i*3+1] = pos+i < len ? hexrep[i*2] : ' ';\n line[i*3+2] = pos+i < len ? hexrep[i*2+1] : ' ';\n line[i*3+3] = ' ';\n }\n line[49] = ' ';\n line[50] = '|';\n for (i = 0 ; i < 16 ; i++) {\n if (data[pos+i] >= 0x20 && data[pos+i] < 0x7f) {\n line[51+i] = pos+i < len ? data[pos+i] : ' ';\n } else {\n line[51+i] = pos+i < len ? '.' 
: ' ';\n }\n }\n line[67] = '|';\n \n line[68] = 0;\n fprintf(stderr, \"%08x %s\\n\", pos, line);\n hexrep += 32;\n }\n fprintf(stderr, \"%08x\\n\", len);\n free(line);\n free(hexrep_orig);\n}\n\n/* special case length=0 means 'finished' */\nvoid showProgress(long long position, long long length) {\n static long long oldpos = 0;\n static unsigned int blocknum = 0;\n const char progressbar[41] = \"========================================\";\n const char *rotatingFoo = \"|/-\\\\\";\n\n if (logfilemode)\n return;\n if (length > 0) {\n if (oldpos > position) {\n oldpos = 0;\n blocknum = 0;\n }\n if (position - oldpos >= 2097152 || position == 0) {\n if (opts.guimode == 0) {\n fprintf(stderr, \"[%-40.*s] %3i%% %c\\r\", (int)(position*40/length),\n progressbar, (int)(position*100/length),\n rotatingFoo[blocknum++ % 4]);\n } else {\n fprintf(stderr, \"gui> %3i\\n\", (int)(position*100/length));\n }\n fflush(stderr);\n oldpos = position;\n }\n } else {\n if (opts.guimode == 0) {\n fputs(\"[========================================] 100% \\n\", stderr);\n } else {\n fputs(\"gui> Finished\\n\", stderr);\n }\n oldpos = 0;\n blocknum = 0;\n }\n}\n\n// ###################### special functions ####################\n\nchar * getHeader() {\n unsigned char *header = malloc(sizeof(char) * 513);\n if (fread(header, 512, 1, file) < 1 && !feof(file))\n PERROR(\"Error reading file\");\n if (feof(file))\n ERROR(\"Error: unexpected end of file\");\n MCRYPT blowfish;\n blowfish = mcrypt_module_open(\"blowfish-compat\", NULL, \"ecb\", NULL);\n unsigned char hardKey[] = {\n 0xEF, 0x3A, 0xB2, 0x9C, 0xD1, 0x9F, 0x0C, 0xAC,\n 0x57, 0x59, 0xC7, 0xAB, 0xD1, 0x2C, 0xC9, 0x2B,\n 0xA3, 0xFE, 0x0A, 0xFE, 0xBF, 0x96, 0x0D, 0x63,\n 0xFE, 0xBD, 0x0F, 0x45};\n mcrypt_generic_init(blowfish, hardKey, 28, NULL);\n mdecrypt_generic(blowfish, header, 512);\n mcrypt_generic_deinit(blowfish);\n mcrypt_module_close(blowfish);\n header[512] = 0;\n \n char *padding = strstr((char*)header, \"&PD=\");\n if (padding 
== NULL)\n ERROR(\"Corrupted header: could not find padding\");\n *padding = 0;\n \n if (opts.verbosity >= VERB_DEBUG) {\n fputs(\"\\nDumping decrypted header:\\n\", stderr);\n dumpQuerystring((char*)header);\n fputs(\"\\n\", stderr);\n }\n return (char*)header;\n}\n\nvoid * generateBigkey(char *date) {\n char *mailhash = bin2hex(MD5(\n (unsigned char*)email, strlen(email), NULL), 16);\n char *passhash = bin2hex(MD5(\n (unsigned char*)password, strlen(password), NULL), 16);\n char *bigkey_hex = malloc(57 * sizeof(char));\n char *ptr = bigkey_hex;\n \n strncpy(ptr, mailhash, 13);\n ptr += 13;\n \n strncpy(ptr, date, 4);\n date += 4;\n ptr += 4;\n \n strncpy(ptr, passhash, 11);\n ptr += 11;\n \n strncpy(ptr, date, 2);\n date += 2;\n ptr += 2;\n \n strncpy(ptr, mailhash + 21, 11);\n ptr += 11;\n \n strncpy(ptr, date, 2);\n ptr += 2;\n \n strncpy(ptr, passhash + 19, 13);\n ptr += 13;\n \n *ptr = 0;\n \n if (opts.verbosity >= VERB_DEBUG) {\n fprintf(stderr, \"\\nGenerated BigKey: %s\\n\\n\", bigkey_hex);\n }\n \n void *res = hex2bin(bigkey_hex);\n \n free(bigkey_hex);\n free(mailhash);\n free(passhash);\n return res;\n}\n\nchar * generateRequest(void *bigkey, char *date) {\n char *headerFN = queryGetParam(header, \"FN\");\n char *thatohthing = queryGetParam(header, \"OH\");\n MCRYPT blowfish = mcrypt_module_open(\"blowfish-compat\", NULL, \"cbc\", NULL);\n char *iv = malloc(mcrypt_enc_get_iv_size(blowfish));\n char *code = malloc(513);\n char *dump = malloc(513);\n char *result = malloc(1024); // base64-encoded code is 680 bytes\n \n memset(iv, 0x42, mcrypt_enc_get_iv_size(blowfish));\n memset(dump, 'd', 512);\n dump[512] = 0;\n \n snprintf(code, 513, \"FOOOOBAR\\\n&OS=01677e4c0ae5468b9b8b823487f14524\\\n&M=01677e4c0ae5468b9b8b823487f14524\\\n&LN=DE\\\n&VN=1.4.1132\\\n&IR=TRUE\\\n&IK=aFzW1tL7nP9vXd8yUfB5kLoSyATQ\\\n&FN=%s\\\n&OH=%s\\\n&A=%s\\\n&P=%s\\\n&D=%s\", headerFN, thatohthing, email, password, dump);\n \n if (opts.verbosity >= VERB_DEBUG) {\n fputs(\"\\nGenerated 
request-'code':\\n\", stderr);\n dumpQuerystring(code);\n fputs(\"\\n\", stderr);\n }\n \n mcrypt_generic_init(blowfish, bigkey, 28, iv);\n mcrypt_generic(blowfish, code, 512);\n mcrypt_generic_deinit(blowfish);\n mcrypt_module_close(blowfish);\n \n if (opts.verbosity >= VERB_DEBUG) {\n fputs(\"\\nEncrypted request-'code':\\n\", stderr);\n dumpHex(code, 512);\n fputs(\"\\n\", stderr);\n }\n \n snprintf(result, 1024, \"http://87.236.198.182/quelle_neu1.php\\\n?code=%s\\\n&AA=%s\\\n&ZZ=%s\", base64Encode(code, 512), email, date);\n \n if (opts.verbosity >= VERB_DEBUG) {\n fprintf(stderr, \"\\nRequest:\\n%s\\n\\n\", result);\n }\n \n free(code);\n free(dump);\n free(iv);\n free(headerFN);\n free(thatohthing);\n return result;\n}\n\nstruct MemoryStruct * contactServer(char *request) {\n // http://curl.haxx.se/libcurl/c/getinmemory.html\n CURL *curl_handle;\n char errorstr[CURL_ERROR_SIZE];\n \n struct MemoryStruct *chunk = malloc(sizeof(struct MemoryStruct));\n chunk->memory=NULL; /* we expect realloc(NULL, size) to work */ \n chunk->size = 0; /* no data at this point */ \n \n curl_global_init(CURL_GLOBAL_ALL);\n \n /* init the curl session */ \n curl_handle = curl_easy_init();\n \n /* specify URL to get */ \n curl_easy_setopt(curl_handle, CURLOPT_URL, request);\n \n /* send all data to this function */ \n curl_easy_setopt(curl_handle, CURLOPT_WRITEFUNCTION, WriteMemoryCallback);\n \n /* we pass our 'chunk' struct to the callback function */ \n curl_easy_setopt(curl_handle, CURLOPT_WRITEDATA, (void *)chunk);\n \n /* imitate the original OTR client */ \n curl_easy_setopt(curl_handle, CURLOPT_USERAGENT, \"Linux-OTR-Decoder/0.4.592\");\n curl_easy_setopt(curl_handle, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_1_0);\n \n /* set verbosity and error message buffer */\n if (opts.verbosity >= VERB_DEBUG)\n curl_easy_setopt(curl_handle, CURLOPT_VERBOSE, 1);\n curl_easy_setopt(curl_handle, CURLOPT_ERRORBUFFER, errorstr);\n \n /* get it! 
*/ \n if (curl_easy_perform(curl_handle) != 0)\n ERROR(\"cURL error: %s\", errorstr);\n \n /* cleanup curl stuff */ \n curl_easy_cleanup(curl_handle);\n \n /*\n * Now, our chunk.memory points to a memory block that is chunk.size\n * bytes big and contains the remote file.\n *\n * Do something nice with it!\n *\n * You should be aware of the fact that at this point we might have an\n * allocated data block, and nothing has yet deallocated that data. So when\n * you're done with it, you should free() it as a nice application.\n */ \n \n /* we're done with libcurl, so clean it up */ \n curl_global_cleanup();\n \n // null-terminate response\n chunk->memory = realloc(chunk->memory, chunk->size + 1);\n if (chunk->memory == NULL) PERROR(\"realloc\");\n chunk->memory[chunk->size] = 0;\n return chunk;\n}\n\nchar * decryptResponse(char *response, int length, void *bigkey) {\n MCRYPT blowfish = mcrypt_module_open(\"blowfish-compat\", NULL, \"cbc\", NULL);\n \n if (length < mcrypt_enc_get_iv_size(blowfish) || length < 8)\n return NULL;\n length -= 8;\n \n char *result = malloc(length);\n memcpy(result, response+8, length);\n \n mcrypt_generic_init(blowfish, bigkey, 28, response);\n mdecrypt_generic(blowfish, result, length);\n mcrypt_generic_deinit(blowfish);\n mcrypt_module_close(blowfish);\n \n char *padding = strstr(result, \"&D=\");\n if (padding == NULL)\n ERROR(\"Corrupted response: could not find padding\");\n *padding = 0;\n \n if (opts.verbosity >= VERB_DEBUG) {\n fputs(\"\\nDecrypted response:\\n\", stderr);\n dumpQuerystring(result);\n fputs(\"\\n\", stderr);\n }\n \n return result;\n}\n\nvoid keycache_open() {\n char *home, *keyfilename;\n \n if ((home = getenv(\"HOME\")) == NULL) return;\n keyfilename = malloc(strlen(home) + 20);\n strcpy(keyfilename, home);\n strcat(keyfilename, \"/.otrkey_cache\");\n keyfile = fopen(keyfilename, \"a+\");\n free(keyfilename);\n}\n\nchar *keycache_get(const char *fh) {\n char *cachephrase, *cachefh;\n static char line[512];\n \n 
if (fh == NULL || keyfile == NULL) return NULL;\n rewind(keyfile);\n while (fgets(line, sizeof(line), keyfile) != NULL) {\n cachefh = strtok(line, \" \\t\\r\\n\");\n cachephrase = strtok(NULL, \" \\t\\r\\n\");\n if (cachephrase == NULL || cachefh == NULL) continue;\n if (strcmp(cachefh, fh) == 0) return cachephrase;\n }\n if (!feof(keyfile)) PERROR(\"fgets\");\n return NULL;\n}\n\nvoid keycache_put(const char *fh, const char *keyphrase) {\n char *cachephrase, *fn;\n \n if (fh == NULL || keyfile == NULL) return;\n if ((cachephrase = keycache_get(fh)) != NULL) {\n if (strcmp(keyphrase, cachephrase) != 0)\n fputs(\"warning: differing keyphrase was found in cache file!\\n\", stderr);\n else\n fputs(\"info: keyphrase was already in cache\\n\", stderr);\n return;\n }\n fn = queryGetParam(header, \"FN\");\n if (fprintf(keyfile, \"%s\\t%s\\t# %s\\n\", fh, keyphrase, fn) < 0)\n PERROR(\"fprintf\");\n fflush(keyfile);\n fputs(\"info: saved keyphrase to ~/.otrkey_cache\\n\", stderr);\n}\n\nvoid fetchKeyphrase() {\n struct termios ios0, ios1;\n time_t time_ = time(NULL);\n char *date = malloc(9);\n strftime(date, 9, \"%Y%m%d\", gmtime(&time_));\n \n if (info) {\n free(info);\n info = NULL;\n }\n \n if (opts.email == NULL) {\n if (!interactive) ERROR(\"Email address not specified\");\n opts.email = malloc(51);\n fputs(\"Enter your eMail-address: \", stderr);\n if (fscanf(ttyfile, \"%50s\", opts.email) < 1)\n ERROR(\"Email invalid\");\n while (fgetc(ttyfile) != '\\n');\n }\n email = strdup(opts.email);\n\n if (opts.password == NULL) {\n if (!interactive) ERROR(\"Password not specified\");\n opts.password = malloc(51);\n fputs(\"Enter your password: \", stderr);\n tcgetattr(fileno(ttyfile), &ios0);\n ios1 = ios0;\n ios1.c_lflag &= ~ECHO;\n tcsetattr(fileno(ttyfile), TCSAFLUSH, &ios1);\n if (fscanf(ttyfile, \"%50s\", opts.password) < 1) {\n tcsetattr(0, TCSAFLUSH, &ios0);\n ERROR(\"Password invalid\");\n }\n tcsetattr(fileno(ttyfile), TCSAFLUSH, &ios0);\n while (fgetc(ttyfile) != 
'\\n');\n fputc('\\n', stderr);\n }\n password = strdup(opts.password);\n \n char *bigkey = generateBigkey(date);\n char *request = generateRequest(bigkey, date);\n free(email);\n free(password);\n \n fputs(\"Trying to contact server...\\n\", stderr);\n struct MemoryStruct *response = contactServer(request);\n\n if (response->size == 0 || response->memory == NULL) {\n ERROR(\"Server sent an empty response, exiting\");\n }\n fputs(\"Server responded.\\n\", stderr);\n \n // skip initial whitespace\n char *message = response->memory;\n message += strspn(message, \" \\t\\n\");\n \n if (isBase64(message) == 0) {\n if (memcmp(message,\"MessageToBePrintedInDecoder\",27) ==0) {\n fputs(\"Server sent us this sweet message:\\n\", stderr);\n quote(message + 27);\n } else {\n fputs(\"Server sent us this ugly crap:\\n\", stderr);\n dumpHex(response->memory, response->size);\n }\n ERROR(\"Server response is unuseable, exiting\");\n }\n \n int info_len;\n char *info_crypted = base64Decode(message, &info_len);\n \n if (info_len % 8 != 0) {\n fputs(\"Length of response must be a multiple of 8.\", stderr);\n dumpHex(info_crypted, info_len);\n ERROR(\"Server response is unuseable, exiting\");\n }\n \n info = decryptResponse(info_crypted, info_len, bigkey);\n \n keyphrase = queryGetParam(info, \"HP\");\n if (keyphrase == NULL)\n ERROR(\"Response lacks keyphrase\");\n \n if (strlen(keyphrase) != 56)\n ERROR(\"Keyphrase has wrong length\");\n \n fprintf(stderr, \"Keyphrase: %s\\n\", keyphrase);\n keycache_put(queryGetParam(header, \"FH\"), keyphrase);\n \n free(date);\n free(bigkey);\n free(request);\n free(response->memory);\n free(response);\n free(info_crypted);\n}\n\nvoid openFile() {\n if (strcmp(\"-\", filename) == 0)\n file = stdin;\n else\n file = fopen(filename, \"rb\");\n \n if (file == NULL)\n PERROR(\"Error opening file\");\n \n char magic[11] = { 0 };\n if (fread(magic, 10, 1, file) < 1 && !feof(file))\n PERROR(\"Error reading file\");\n if (feof(file))\n ERROR(\"Error: 
unexpected end of file\");\n if (strcmp(magic, \"OTRKEYFILE\") != 0)\n ERROR(\"Wrong file format\");\n \n header = getHeader();\n}\n\ntypedef struct verifyFile_ctx {\n MD5_CTX ctx;\n char hash1[16];\n int input;\n} vfy_t;\n\nvoid verifyFile_init(vfy_t *vfy, int input) {\n char *hash_hex, *hash;\n int i;\n \n memset(vfy, 0, sizeof(*vfy));\n vfy->input = input;\n \n /* get MD5 sum from 'OH' or 'FH' header field */\n hash_hex = queryGetParam(header, vfy->input?\"OH\":\"FH\");\n if (hash_hex == NULL || strlen(hash_hex) != 48)\n ERROR(\"Missing hash in file header / unexpected format\");\n for (i=1; i<16; ++i) {\n hash_hex[2*i] = hash_hex[3*i];\n hash_hex[2*i+1] = hash_hex[3*i+1];\n }\n hash_hex[32] = 0;\n if (opts.verbosity >= VERB_DEBUG)\n fprintf(stderr, \"Checking %s against MD5 sum: %s\\n\",\n vfy->input?\"input\":\"output\", hash_hex);\n hash = hex2bin(hash_hex);\n memcpy(vfy->hash1, hash, 16);\n \n /* calculate MD5 sum of file (without header) */\n memset(&vfy->ctx, 0, sizeof(vfy->ctx));\n MD5_Init(&vfy->ctx);\n \n free(hash_hex);\n free(hash);\n}\n\nvoid verifyFile_data(vfy_t *vfy, char *buffer, size_t len) {\n MD5_Update(&vfy->ctx, buffer, len);\n}\n\nvoid verifyFile_final(vfy_t *vfy) {\n unsigned char md5[16];\n \n MD5_Final(md5, &vfy->ctx);\n if (memcmp(vfy->hash1, md5, 16) != 0) {\n if (vfy->input)\n ERROR(\"Input file had errors. Output may or may not be usable.\");\n else\n ERROR(\"Output verification failed. 
Wrong key?\");\n }\n}\n\nvoid verifyOnly() {\n vfy_t vfy;\n size_t n;\n static char buffer[65536];\n unsigned long long length;\n unsigned long long position;\n\n length = atoll(queryGetParam(header, \"SZ\")) - 522;\n fputs(\"Verifying otrkey...\\n\", stderr);\n verifyFile_init(&vfy, 1);\n for (position = 0; position < length; position += n) {\n showProgress(position, length);\n n = fread(buffer, 1, MIN(length - position, sizeof(buffer)), file);\n if (n == 0 || ferror(file)) break;\n verifyFile_data(&vfy, buffer, n);\n }\n if (position < length) {\n if (!feof(file)) PERROR(\"fread\");\n if (!logfilemode) fputc('\\n', stderr);\n fputs(\"file is too short\\n\", stderr);\n }\n else\n showProgress(1, 0);\n\n if (fread(buffer, 1, 1, file) > 0)\n fputs(\"file contains trailing garbage\\n\", stderr);\n else if (!feof(file))\n PERROR(\"fread\");\n verifyFile_final(&vfy);\n fputs(\"file is OK\\n\", stderr);\n}\n\nvoid decryptFile() {\n int fd;\n char *headerFN;\n struct stat st;\n FILE *destfile;\n\n if (opts.destfile == NULL) {\n headerFN = queryGetParam(header, \"FN\");\n if (opts.destdir != NULL) {\n destfilename = malloc(strlen(opts.destdir) + strlen(headerFN) + 2);\n strcpy(destfilename, opts.destdir);\n strcat(destfilename, \"/\");\n strcat(destfilename, headerFN);\n free(headerFN);\n }\n else {\n destfilename = headerFN;\n }\n }\n else {\n destfilename = strdup(opts.destfile);\n }\n \n if (strcmp(destfilename, \"-\") == 0) {\n if (isatty(1)) ERROR(\"error: cowardly refusing to output to a terminal\");\n fd = 1;\n }\n else\n fd = open(destfilename, O_WRONLY|O_CREAT|O_EXCL, CREAT_MODE);\n if (fd < 0 && errno == EEXIST) {\n if (stat(destfilename, &st) != 0 || S_ISREG(st.st_mode)) {\n if (!interactive) ERROR(\"Destination file exists: %s\", destfilename);\n fprintf(stderr, \"Destination file exists: %s\\nType y to overwrite: \",\n destfilename);\n if (fgetc(ttyfile) != 'y') exit(EXIT_FAILURE);\n while (fgetc(ttyfile) != '\\n');\n fd = open(destfilename, O_WRONLY|O_TRUNC, 
0);\n }\n else\n fd = open(destfilename, O_WRONLY, 0);\n }\n if (fd < 0)\n PERROR(\"Error opening destination file: %s\", destfilename);\n if ((destfile = fdopen(fd, \"wb\")) == NULL)\n PERROR(\"fdopen\");\n \n fputs(\"Decrypting and verifying...\\n\", stderr); // -----------------------\n \n void *key = hex2bin(keyphrase);\n MCRYPT blowfish = mcrypt_module_open(\"blowfish-compat\", NULL, \"ecb\", NULL);\n mcrypt_generic_init(blowfish, key, 28, NULL);\n \n unsigned long long length = atoll(queryGetParam(header, \"SZ\")) - 522;\n unsigned long long position = 0;\n size_t readsize;\n size_t writesize;\n static char buffer[65536];\n vfy_t vfy_in, vfy_out;\n \n verifyFile_init(&vfy_in, 1);\n verifyFile_init(&vfy_out, 0);\n \n while (position < length) {\n showProgress(position, length);\n\n if (length - position >= sizeof(buffer)) {\n readsize = fread(buffer, 1, sizeof(buffer), file);\n } else {\n readsize = fread(buffer, 1, length - position, file);\n }\n if (readsize <= 0) {\n if (feof(file))\n ERROR(\"Input file is too short\");\n PERROR(\"Error reading input file\");\n }\n \n verifyFile_data(&vfy_in, buffer, readsize);\n /* If the payload length is not a multiple of eight,\n * the last few bytes are stored unencrypted */\n mdecrypt_generic(blowfish, buffer, readsize - readsize % 8);\n verifyFile_data(&vfy_out, buffer, readsize);\n \n writesize = fwrite(buffer, 1, readsize, destfile);\n if (writesize != readsize)\n PERROR(\"Error writing to destination file\");\n \n position += writesize;\n }\n showProgress(1, 0);\n\n verifyFile_final(&vfy_in);\n verifyFile_final(&vfy_out);\n fputs(\"OK checksums from header match\\n\", stderr);\n \n mcrypt_generic_deinit(blowfish);\n mcrypt_module_close(blowfish);\n \n if (fclose(destfile) != 0)\n PERROR(\"Error closing destination file.\");\n\n if (opts.unlinkmode) {\n if (strcmp(filename, \"-\") != 0 &&\n stat(filename, &st) == 0 && S_ISREG(st.st_mode) &&\n strcmp(destfilename, \"-\") != 0 &&\n stat(destfilename, &st) == 0 && 
S_ISREG(st.st_mode)) {\n if (unlink(filename) != 0)\n PERROR(\"Cannot delete input file\");\n else\n fputs(\"info: input file has been deleted\\n\", stderr);\n }\n else {\n fputs(\"Warning: Not deleting input file (input or \"\n \"output is not a regular file)\\n\", stderr);\n }\n }\n \n free(key);\n free(destfilename);\n}\n\nvoid processFile() {\n int storeKeyphrase;\n switch (opts.action) {\n case ACTION_INFO:\n // TODO: output something nicer than just the querystring\n dumpQuerystring(header);\n break;\n case ACTION_FETCHKEY:\n fetchKeyphrase();\n break;\n case ACTION_DECRYPT:\n storeKeyphrase = 1;\n if (opts.keyphrase == NULL) {\n storeKeyphrase = 0;\n keyphrase = keycache_get(queryGetParam(header, \"FH\"));\n if (keyphrase)\n fprintf(stderr, \"Keyphrase from cache: %s\\n\", keyphrase);\n else\n fetchKeyphrase();\n }\n else {\n keyphrase = strdup(opts.keyphrase);\n }\n decryptFile();\n if (storeKeyphrase)\n keycache_put(queryGetParam(header, \"FH\"), keyphrase);\n break;\n case ACTION_VERIFY:\n verifyOnly();\n break;\n }\n}\n\nvoid usageError() {\n fputs(\"\\n\"\n \"Usage: otrtool [-h] [-v] [-i|-f|-x|-y] [-u]\\n\"\n \" [-k ] [-e ] [-p ]\\n\"\n \" [-D ] [-O ]\\n\"\n \" [ ... 
[]]\\n\"\n \"\\n\"\n \"MODES OF OPERATION\\n\"\n \" -i | Display information about file (default action)\\n\"\n \" -f | Fetch keyphrase for file\\n\"\n \" -x | Decrypt file\\n\"\n \" -y | Verify only\\n\"\n \"\\n\"\n \"FREQUENTLY USED OPTIONS\\n\"\n \" -k | Do not fetch keyphrase, use this one\\n\"\n \" -D | Output folder\\n\"\n \" -O | Output file (overrides -D)\\n\"\n \" -u | Delete otrkey-files after successful decryption\\n\"\n \"\\n\"\n \"See otrtool(1) for further information\\n\", stderr);\n}\n\nint main(int argc, char *argv[]) {\n fputs(\"OTR-Tool, \" VERSION \"\\n\", stderr);\n\n int i;\n int opt;\n while ( (opt = getopt(argc, argv, \"hvgifxyk:e:p:D:O:u\")) != -1) {\n switch (opt) {\n case 'h':\n usageError();\n exit(EXIT_SUCCESS);\n break;\n case 'v':\n opts.verbosity = VERB_DEBUG;\n break;\n case 'g':\n opts.guimode = 1;\n interactive = 0;\n break;\n case 'i':\n opts.action = ACTION_INFO;\n break;\n case 'f':\n opts.action = ACTION_FETCHKEY;\n break;\n case 'x':\n opts.action = ACTION_DECRYPT;\n break;\n case 'y':\n opts.action = ACTION_VERIFY;\n break;\n case 'k':\n opts.keyphrase = optarg;\n break;\n case 'e':\n opts.email = strdup(optarg);\n memset(optarg, 'x', strlen(optarg));\n break;\n case 'p':\n opts.password = strdup(optarg);\n memset(optarg, 'x', strlen(optarg));\n break;\n case 'D':\n opts.destdir = optarg;\n break;\n case 'O':\n opts.destfile = optarg;\n break;\n case 'u':\n opts.unlinkmode = 1;\n break;\n default:\n usageError();\n exit(EXIT_FAILURE);\n }\n }\n if (opts.verbosity >= VERB_DEBUG) {\n fputs(\"command line: \", stderr);\n for (i = 0; i < argc; ++i) {\n fputs(argv[i], stderr);\n fputc((i == argc - 1) ? 
'\\n' : ' ', stderr);\n }\n }\n \n if (optind >= argc) {\n fprintf(stderr, \"Missing argument: otrkey-file\\n\");\n usageError();\n exit(EXIT_FAILURE);\n }\n if (argc > optind + 1) {\n if (opts.destfile != NULL && strcmp(opts.destfile, \"-\") == 0) {\n i = 0;\n }\n else for (i = optind; i < argc; i++) {\n if (strcmp(argv[i], \"-\") == 0)\n break;\n }\n if (i < argc)\n ERROR(\"Usage error: piping is not possible with multiple input files\");\n }\n\n if (!isatty(2)) {\n logfilemode = 1;\n interactive = 0;\n }\n if (interactive) {\n if (!isatty(0)) {\n ttyfile = fopen(\"/dev/tty\", \"r\");\n if (ttyfile == NULL) {\n if (opts.verbosity >= VERB_DEBUG) perror(\"open /dev/tty\");\n interactive = 0;\n }\n }\n else ttyfile = stdin;\n }\n\n if (opts.action == ACTION_DECRYPT || opts.action == ACTION_VERIFY) {\n errno = 0;\n nice(10);\n if (errno == 0 && opts.verbosity >= VERB_DEBUG)\n fputs(\"NICE was set to 10\\n\", stderr);\n\n // I am not sure if this really catches all errors\n // If this causes problems, just delete the ionice-stuff\n #ifdef __NR_ioprio_set\n if (syscall(__NR_ioprio_set, 1, getpid(), 7 | 3 << 13) == 0\n && opts.verbosity >= VERB_DEBUG)\n fputs(\"IONICE class was set to Idle\\n\", stderr);\n #endif\n }\n if (opts.action == ACTION_FETCHKEY || opts.action == ACTION_DECRYPT) {\n keycache_open();\n }\n\n for (i = optind; i < argc; i++) {\n filename = argv[i];\n if (argc > optind + 1)\n fprintf(stderr, \"\\n==> %s <==\\n\", filename);\n openFile();\n processFile();\n if (fclose(file) != 0)\n PERROR(\"Error closing file\");\n free(header);\n }\n \n exit(EXIT_SUCCESS);\n}\n"}, "files_after": {"src/main.c": "#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\n#include \n\n#include \n#include \"md5.h\"\n\n#include \n#include \n\n#define ERROR(...) \\\n ({fprintf(stderr, \"\\n\"); \\\n fprintf(stderr, __VA_ARGS__); \\\n fprintf(stderr, \"\\n\"); \\\n exit(EXIT_FAILURE); })\n\n#define PERROR(...) 
\\\n ({fprintf(stderr, \"\\n\"); \\\n fprintf(stderr, __VA_ARGS__); \\\n fprintf(stderr, \": \"); \\\n perror(NULL); \\\n exit(EXIT_FAILURE); })\n\n#define MIN(a,b) ((a)<(b)?(a):(b))\n\n#ifndef VERSION\n #define VERSION \"version unknown\"\n#endif\n\n#define LINE_LENGTH 80\n#define MAX_RESPONSE_LENGTH 1000\n#define CREAT_MODE S_IWUSR|S_IRUSR|S_IRGRP|S_IROTH\n\n#define VERB_INFO 1\n#define VERB_DEBUG 2\n\n#define ACTION_INFO 1\n#define ACTION_FETCHKEY 2\n#define ACTION_DECRYPT 3\n#define ACTION_VERIFY 4\n\n/* global options as supplied by the user via command-line etc. */\nstruct otrtool_options {\n int action;\n int verbosity;\n int guimode; // do not output \\r and stuff\n int unlinkmode;\n char *email;\n char *password;\n char *keyphrase;\n char *destdir;\n char *destfile;\n};\nstatic struct otrtool_options opts = {\n .action = ACTION_INFO,\n .verbosity = VERB_INFO,\n .guimode = 0,\n .unlinkmode = 0,\n .email = NULL,\n .password = NULL,\n .keyphrase = NULL,\n .destdir = NULL,\n .destfile = NULL,\n};\n\nstatic int interactive = 1; // ask questions instead of exiting\nstatic int logfilemode = 0; // do not output progress bar\n\nstatic char *email = NULL;\nstatic char *password = NULL;\nstatic char *keyphrase = NULL;\nstatic char *filename = NULL;\nstatic char *destfilename = NULL;\n\nstatic FILE *file = NULL;\nstatic FILE *keyfile = NULL;\nstatic FILE *ttyfile = NULL;\nstatic char *header = NULL;\nstatic char *info = NULL;\n\n// ######################## curl-stuff #######################\n\nstruct MemoryStruct {\n char *memory;\n size_t size;\n};\n\nstatic size_t WriteMemoryCallback(void *ptr, size_t size,\n size_t nmemb, void *data) {\n size_t realsize = size * nmemb;\n struct MemoryStruct *mem = (struct MemoryStruct *)data;\n char *newmem;\n \n // abort very long transfers\n if (mem->size + realsize > MAX_RESPONSE_LENGTH) {\n realsize = mem->size <= MAX_RESPONSE_LENGTH\n ? 
MAX_RESPONSE_LENGTH - mem->size\n : 0;\n }\n if (realsize < 1) return 0;\n \n // \"If realloc() fails the original block is left untouched\" (man 3 realloc)\n newmem = realloc(mem->memory, mem->size + realsize);\n if (newmem != NULL) {\n mem->memory = newmem;\n memcpy(&(mem->memory[mem->size]), ptr, realsize);\n mem->size += realsize;\n } else return 0;\n return realsize;\n}\n\n// ######################## generic functions ####################\n\nchar * bin2hex(void *data_, int len) {\n unsigned char *data = data_;\n unsigned char *result = malloc(sizeof(char) * len * 2 + 1);\n result[len * 2] = 0;\n int foo;\n for (len-- ; len >= 0 ; len--) {\n foo = data[len] % 16;\n result[len*2 + 1] = foo > 9 ? 0x37 + foo : 0x30 + foo;\n foo = data[len] >> 4;\n result[len*2] = foo > 9 ? 0x37 + foo : 0x30 + foo;\n }\n return (char*)result;\n}\n\nvoid * hex2bin(char *data_) {\n int len = strlen(data_) / 2;\n unsigned char *data = (unsigned char*)data_;\n // never tested with lowercase letters!\n unsigned char *result = malloc(sizeof(char) * len + 1);\n int foo, bar;\n result[len] = 0;\n for (len-- ; len >= 0 ; len--) {\n foo = data[len*2];\n if (foo < 0x41) {\n // is a digit\n bar = foo - 0x30;\n } else if (foo < 0x61) {\n // is a uppercase letter\n bar = foo - 0x37;\n } else {\n // is a lowercase letter\n bar = foo - 0x57;\n }\n result[len] = bar << 4;\n \n foo = data[len*2 + 1];\n if (foo < 0x41) {\n // is a digit\n bar = foo - 0x30;\n } else if (foo < 0x61) {\n // is a uppercase letter\n bar = foo - 0x37;\n } else {\n // is a lowercase letter\n bar = foo - 0x57;\n }\n result[len] += bar;\n }\n return (void*)result;\n}\n\n// C does not support binary constants, but gcc >= 4.3 does.\n// Because we can't really expect people to update their compilers in four\n// years (4.3 is from march 2008), the following defines will substitute\n// the three values used by base64Encode with their decimal equivalent.\n#define B_11 3\n#define B_1111 15\n#define B_111111 63\nchar * 
base64Encode(void *data_, int len) {\n unsigned char *data = data_;\n static const char *b64 = \"\\\nABCDEFGHIJKLMNOPQRSTUVWXYZ\\\nabcdefghijklmnopqrstuvwxyz\\\n0123456789+/\";\n int blocks = (len + 2) / 3;\n int newlen = blocks * 4 + 1;\n char *result = malloc(newlen);\n char *resptr = result;\n int i;\n \n for (i = len / 3 ; i > 0 ; i--) {\n resptr[0] = b64[ data[0] >> 2 ];\n resptr[1] = b64[ (data[0] & B_11) << 4\n | data[1] >> 4 ];\n resptr[2] = b64[ (data[1] & B_1111) << 2\n | data[2] >> 6 ];\n resptr[3] = b64[ data[2] & B_111111 ];\n resptr += 4;\n data += 3;\n }\n \n if (len < blocks * 3 - 1) {\n resptr[0] = b64[ data[0] >> 2 ];\n resptr[1] = b64[ (data[0] & B_11) << 4 ];\n resptr[2] = '=';\n resptr[3] = '=';\n resptr += 4;\n } else if (len < blocks * 3) {\n resptr[0] = b64[ data[0] >> 2 ];\n resptr[1] = b64[ (data[0] & B_11) << 4\n | data[1] >> 4 ];\n resptr[2] = b64[ (data[1] & B_1111) << 2 ];\n resptr[3] = '=';\n resptr += 4;\n }\n \n *resptr = 0;\n return result;\n}\n\nvoid * base64Decode(char *text, int *outlen) {\n static const unsigned char b64dec[] = {\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //00\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //10\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 62, 0, 0, 0, 63, //20\n 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 0, 0, 0, 0, 0, 0, //30\n 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, //40\n 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 0, 0, 0, 0, 0, //50\n 0, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, //60\n 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 0, 0, 0, 0, 0, //70\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //80\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //90\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //a0\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //b0\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //c0\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //d0\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //e0\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0 //f0\n };\n // this functions treats invalid characters as 'A'. deal with it :-P\n int inlen = (strlen(text) >> 2) << 2;\n int blocks = inlen >> 2;\n *outlen = blocks * 3 - (text[inlen-2] == '='\n ? 2 : (text[inlen-1] == '=' ? 1 : 0));\n char *result = malloc(blocks * 3);\n char *resptr = result;\n u_int8_t *text_ = (u_int8_t*)text;\n int i;\n \n for (i = 0 ; i < blocks ; i++) {\n resptr[0] = b64dec[text_[0]] << 2 | b64dec[text_[1]] >> 4;\n resptr[1] = b64dec[text_[1]] << 4 | b64dec[text_[2]] >> 2;\n resptr[2] = b64dec[text_[2]] << 6 | b64dec[text_[3]];\n \n text_ += 4;\n resptr += 3;\n }\n \n return (void*)result;\n}\n\nint isBase64(char *text) {\n static const char *b64 = \"\\\nABCDEFGHIJKLMNOPQRSTUVWXYZ\\\nabcdefghijklmnopqrstuvwxyz\\\n0123456789+/=\";\n return strlen(text) == strspn(text, b64);\n}\n\nchar * queryGetParam(char *query, char *name) {\n char *begin = index(query, '&');\n char *end;\n int nameLen = strlen(name);\n \n while (begin != NULL) {\n begin++;\n if (strncmp(begin, name, nameLen) == 0 && begin[nameLen] == '=') {\n begin += nameLen + 1;\n end = index(begin, '&');\n if (end == NULL)\n end = begin + strlen(begin);\n char *result = malloc(end - begin + 1);\n strncpy(result, begin, end - begin);\n result[end - begin] = 0;\n return result;\n }\n begin = index(begin, '&');\n }\n return NULL;\n}\n\nvoid quote(char *message) {\n char line[LINE_LENGTH + 1];\n line[0] = '>';\n line[1] = ' ';\n int index = 2;\n \n while (*message != 0) {\n if (*message < 0x20 || *message > 0x7E) {\n line[index++] = ' ';\n } else {\n line[index++] = *message;\n }\n if (index == LINE_LENGTH) {\n line[index++] = '\\n';\n fwrite(line, index, 1, stderr);\n line[0] = '>';\n line[1] = ' ';\n index = 2;\n }\n message++;\n }\n line[index++] = '\\n';\n if (index != 3) fwrite(line, index, 1, stderr);\n}\n\nvoid dumpQuerystring(char *query) {\n int length = strlen(query);\n char line[LINE_LENGTH + 1];\n int index = 0;\n \n if (*query == '&') {\n line[0] = '&';\n 
index++;\n query++;\n }\n \n for (; length > 0 ; length --) {\n if (*query == '&') {\n line[index] = '\\n';\n fwrite(line, index + 1, 1, stderr);\n index = 0;\n }\n line[index] = *query;\n \n index++;\n if (index == LINE_LENGTH) {\n line[index] = '\\n';\n fwrite(line, index + 1, 1, stderr);\n line[0] = ' ';\n index = 1;\n }\n query++;\n }\n line[index] = '\\n';\n if (index != LINE_LENGTH) fwrite(line, index + 1, 1, stderr);\n}\n\nvoid dumpHex(void *data_, int len) {\n unsigned char *data = data_;\n unsigned char *line = malloc(sizeof(char) * LINE_LENGTH + 1);\n char *hexrep_orig = bin2hex(data, len);\n char *hexrep = hexrep_orig;\n int i, pos;\n \n for (pos = 0 ; pos < len ; pos += 16) {\n for (i = 0 ; i < 8 ; i++) {\n line[i*3] = pos+i < len ? hexrep[i*2] : ' ';\n line[i*3+1] = pos+i < len ? hexrep[i*2+1] : ' ';\n line[i*3+2] = ' ';\n }\n line[24] = ' ';\n for (i = 8 ; i < 16 ; i++) {\n line[i*3+1] = pos+i < len ? hexrep[i*2] : ' ';\n line[i*3+2] = pos+i < len ? hexrep[i*2+1] : ' ';\n line[i*3+3] = ' ';\n }\n line[49] = ' ';\n line[50] = '|';\n for (i = 0 ; i < 16 ; i++) {\n if (data[pos+i] >= 0x20 && data[pos+i] < 0x7f) {\n line[51+i] = pos+i < len ? data[pos+i] : ' ';\n } else {\n line[51+i] = pos+i < len ? '.' 
: ' ';\n }\n }\n line[67] = '|';\n \n line[68] = 0;\n fprintf(stderr, \"%08x %s\\n\", pos, line);\n hexrep += 32;\n }\n fprintf(stderr, \"%08x\\n\", len);\n free(line);\n free(hexrep_orig);\n}\n\n/* special case length=0 means 'finished' */\nvoid showProgress(long long position, long long length) {\n static long long oldpos = 0;\n static unsigned int blocknum = 0;\n const char progressbar[41] = \"========================================\";\n const char *rotatingFoo = \"|/-\\\\\";\n\n if (logfilemode)\n return;\n if (length > 0) {\n if (oldpos > position) {\n oldpos = 0;\n blocknum = 0;\n }\n if (position - oldpos >= 2097152 || position == 0) {\n if (opts.guimode == 0) {\n fprintf(stderr, \"[%-40.*s] %3i%% %c\\r\", (int)(position*40/length),\n progressbar, (int)(position*100/length),\n rotatingFoo[blocknum++ % 4]);\n } else {\n fprintf(stderr, \"gui> %3i\\n\", (int)(position*100/length));\n }\n fflush(stderr);\n oldpos = position;\n }\n } else {\n if (opts.guimode == 0) {\n fputs(\"[========================================] 100% \\n\", stderr);\n } else {\n fputs(\"gui> Finished\\n\", stderr);\n }\n oldpos = 0;\n blocknum = 0;\n }\n}\n\n// ###################### special functions ####################\n\nchar * getHeader() {\n unsigned char *header = malloc(sizeof(char) * 513);\n if (fread(header, 512, 1, file) < 1 && !feof(file))\n PERROR(\"Error reading file\");\n if (feof(file))\n ERROR(\"Error: unexpected end of file\");\n MCRYPT blowfish;\n blowfish = mcrypt_module_open(\"blowfish-compat\", NULL, \"ecb\", NULL);\n unsigned char hardKey[] = {\n 0xEF, 0x3A, 0xB2, 0x9C, 0xD1, 0x9F, 0x0C, 0xAC,\n 0x57, 0x59, 0xC7, 0xAB, 0xD1, 0x2C, 0xC9, 0x2B,\n 0xA3, 0xFE, 0x0A, 0xFE, 0xBF, 0x96, 0x0D, 0x63,\n 0xFE, 0xBD, 0x0F, 0x45};\n mcrypt_generic_init(blowfish, hardKey, 28, NULL);\n mdecrypt_generic(blowfish, header, 512);\n mcrypt_generic_deinit(blowfish);\n mcrypt_module_close(blowfish);\n header[512] = 0;\n \n char *padding = strstr((char*)header, \"&PD=\");\n if (padding 
== NULL)\n ERROR(\"Corrupted header: could not find padding\");\n *padding = 0;\n \n if (opts.verbosity >= VERB_DEBUG) {\n fputs(\"\\nDumping decrypted header:\\n\", stderr);\n dumpQuerystring((char*)header);\n fputs(\"\\n\", stderr);\n }\n return (char*)header;\n}\n\nvoid * generateBigkey(char *date) {\n char *mailhash = bin2hex(MD5(\n (unsigned char*)email, strlen(email), NULL), 16);\n char *passhash = bin2hex(MD5(\n (unsigned char*)password, strlen(password), NULL), 16);\n char *bigkey_hex = malloc(57 * sizeof(char));\n char *ptr = bigkey_hex;\n \n strncpy(ptr, mailhash, 13);\n ptr += 13;\n \n strncpy(ptr, date, 4);\n date += 4;\n ptr += 4;\n \n strncpy(ptr, passhash, 11);\n ptr += 11;\n \n strncpy(ptr, date, 2);\n date += 2;\n ptr += 2;\n \n strncpy(ptr, mailhash + 21, 11);\n ptr += 11;\n \n strncpy(ptr, date, 2);\n ptr += 2;\n \n strncpy(ptr, passhash + 19, 13);\n ptr += 13;\n \n *ptr = 0;\n \n if (opts.verbosity >= VERB_DEBUG) {\n fprintf(stderr, \"\\nGenerated BigKey: %s\\n\\n\", bigkey_hex);\n }\n \n void *res = hex2bin(bigkey_hex);\n \n free(bigkey_hex);\n free(mailhash);\n free(passhash);\n return res;\n}\n\nchar * generateRequest(void *bigkey, char *date) {\n char *headerFN = queryGetParam(header, \"FN\");\n char *thatohthing = queryGetParam(header, \"OH\");\n MCRYPT blowfish = mcrypt_module_open(\"blowfish-compat\", NULL, \"cbc\", NULL);\n char *iv = malloc(mcrypt_enc_get_iv_size(blowfish));\n char *code = malloc(513);\n char *dump = malloc(513);\n char *result = malloc(1024); // base64-encoded code is 680 bytes\n \n memset(iv, 0x42, mcrypt_enc_get_iv_size(blowfish));\n memset(dump, 'd', 512);\n dump[512] = 0;\n \n snprintf(code, 513, \"FOOOOBAR\\\n&OS=01677e4c0ae5468b9b8b823487f14524\\\n&M=01677e4c0ae5468b9b8b823487f14524\\\n&LN=DE\\\n&VN=1.4.1132\\\n&IR=TRUE\\\n&IK=aFzW1tL7nP9vXd8yUfB5kLoSyATQ\\\n&FN=%s\\\n&OH=%s\\\n&A=%s\\\n&P=%s\\\n&D=%s\", headerFN, thatohthing, email, password, dump);\n \n if (opts.verbosity >= VERB_DEBUG) {\n fputs(\"\\nGenerated 
request-'code':\\n\", stderr);\n dumpQuerystring(code);\n fputs(\"\\n\", stderr);\n }\n \n mcrypt_generic_init(blowfish, bigkey, 28, iv);\n mcrypt_generic(blowfish, code, 512);\n mcrypt_generic_deinit(blowfish);\n mcrypt_module_close(blowfish);\n \n if (opts.verbosity >= VERB_DEBUG) {\n fputs(\"\\nEncrypted request-'code':\\n\", stderr);\n dumpHex(code, 512);\n fputs(\"\\n\", stderr);\n }\n \n snprintf(result, 1024, \"http://87.236.198.182/quelle_neu1.php\\\n?code=%s\\\n&AA=%s\\\n&ZZ=%s\", base64Encode(code, 512), email, date);\n \n if (opts.verbosity >= VERB_DEBUG) {\n fprintf(stderr, \"\\nRequest:\\n%s\\n\\n\", result);\n }\n \n free(code);\n free(dump);\n free(iv);\n free(headerFN);\n free(thatohthing);\n return result;\n}\n\nstruct MemoryStruct * contactServer(char *request) {\n // http://curl.haxx.se/libcurl/c/getinmemory.html\n CURL *curl_handle;\n char errorstr[CURL_ERROR_SIZE];\n \n struct MemoryStruct *chunk = malloc(sizeof(struct MemoryStruct));\n chunk->memory=NULL; /* we expect realloc(NULL, size) to work */ \n chunk->size = 0; /* no data at this point */ \n \n curl_global_init(CURL_GLOBAL_ALL);\n \n /* init the curl session */ \n curl_handle = curl_easy_init();\n \n /* specify URL to get */ \n curl_easy_setopt(curl_handle, CURLOPT_URL, request);\n \n /* send all data to this function */ \n curl_easy_setopt(curl_handle, CURLOPT_WRITEFUNCTION, WriteMemoryCallback);\n \n /* we pass our 'chunk' struct to the callback function */ \n curl_easy_setopt(curl_handle, CURLOPT_WRITEDATA, (void *)chunk);\n \n /* imitate the original OTR client */ \n curl_easy_setopt(curl_handle, CURLOPT_USERAGENT, \"Linux-OTR-Decoder/0.4.592\");\n curl_easy_setopt(curl_handle, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_1_0);\n \n /* set verbosity and error message buffer */\n if (opts.verbosity >= VERB_DEBUG)\n curl_easy_setopt(curl_handle, CURLOPT_VERBOSE, 1);\n curl_easy_setopt(curl_handle, CURLOPT_ERRORBUFFER, errorstr);\n \n /* get it! 
*/ \n if (curl_easy_perform(curl_handle) != 0)\n ERROR(\"cURL error: %s\", errorstr);\n \n /* cleanup curl stuff */ \n curl_easy_cleanup(curl_handle);\n \n /*\n * Now, our chunk.memory points to a memory block that is chunk.size\n * bytes big and contains the remote file.\n *\n * Do something nice with it!\n *\n * You should be aware of the fact that at this point we might have an\n * allocated data block, and nothing has yet deallocated that data. So when\n * you're done with it, you should free() it as a nice application.\n */ \n \n /* we're done with libcurl, so clean it up */ \n curl_global_cleanup();\n \n // null-terminate response\n chunk->memory = realloc(chunk->memory, chunk->size + 1);\n if (chunk->memory == NULL) PERROR(\"realloc\");\n chunk->memory[chunk->size] = 0;\n return chunk;\n}\n\nchar * decryptResponse(char *response, int length, void *bigkey) {\n MCRYPT blowfish = mcrypt_module_open(\"blowfish-compat\", NULL, \"cbc\", NULL);\n \n if (length < mcrypt_enc_get_iv_size(blowfish) || length < 8)\n return NULL;\n length -= 8;\n \n char *result = malloc(length);\n memcpy(result, response+8, length);\n \n mcrypt_generic_init(blowfish, bigkey, 28, response);\n mdecrypt_generic(blowfish, result, length);\n mcrypt_generic_deinit(blowfish);\n mcrypt_module_close(blowfish);\n \n char *padding = strstr(result, \"&D=\");\n if (padding == NULL)\n ERROR(\"Corrupted response: could not find padding\");\n *padding = 0;\n \n if (opts.verbosity >= VERB_DEBUG) {\n fputs(\"\\nDecrypted response:\\n\", stderr);\n dumpQuerystring(result);\n fputs(\"\\n\", stderr);\n }\n \n return result;\n}\n\nvoid keycache_open() {\n char *home, *keyfilename;\n \n if ((home = getenv(\"HOME\")) == NULL) return;\n keyfilename = malloc(strlen(home) + 20);\n strcpy(keyfilename, home);\n strcat(keyfilename, \"/.otrkey_cache\");\n keyfile = fopen(keyfilename, \"a+\");\n free(keyfilename);\n}\n\nchar *keycache_get(const char *fh) {\n char *cachephrase, *cachefh;\n static char line[512];\n \n 
if (fh == NULL || keyfile == NULL) return NULL;\n rewind(keyfile);\n while (fgets(line, sizeof(line), keyfile) != NULL) {\n cachefh = strtok(line, \" \\t\\r\\n\");\n cachephrase = strtok(NULL, \" \\t\\r\\n\");\n if (cachephrase == NULL || cachefh == NULL) continue;\n if (strcmp(cachefh, fh) == 0) return cachephrase;\n }\n if (!feof(keyfile)) PERROR(\"fgets\");\n return NULL;\n}\n\nvoid keycache_put(const char *fh, const char *keyphrase) {\n char *cachephrase, *fn;\n \n if (fh == NULL || keyfile == NULL) return;\n if ((cachephrase = keycache_get(fh)) != NULL) {\n if (strcmp(keyphrase, cachephrase) != 0)\n fputs(\"warning: differing keyphrase was found in cache file!\\n\", stderr);\n else\n fputs(\"info: keyphrase was already in cache\\n\", stderr);\n return;\n }\n fn = queryGetParam(header, \"FN\");\n if (fprintf(keyfile, \"%s\\t%s\\t# %s\\n\", fh, keyphrase, fn) < 0)\n PERROR(\"fprintf\");\n fflush(keyfile);\n fputs(\"info: saved keyphrase to ~/.otrkey_cache\\n\", stderr);\n}\n\nvoid fetchKeyphrase() {\n struct termios ios0, ios1;\n time_t time_ = time(NULL);\n char *date = malloc(9);\n strftime(date, 9, \"%Y%m%d\", gmtime(&time_));\n \n if (info) {\n free(info);\n info = NULL;\n }\n \n if (opts.email == NULL) {\n if (!interactive) ERROR(\"Email address not specified\");\n opts.email = malloc(51);\n fputs(\"Enter your eMail-address: \", stderr);\n if (fscanf(ttyfile, \"%50s\", opts.email) < 1)\n ERROR(\"Email invalid\");\n while (fgetc(ttyfile) != '\\n');\n }\n email = strdup(opts.email);\n\n if (opts.password == NULL) {\n if (!interactive) ERROR(\"Password not specified\");\n opts.password = malloc(51);\n fputs(\"Enter your password: \", stderr);\n tcgetattr(fileno(ttyfile), &ios0);\n ios1 = ios0;\n ios1.c_lflag &= ~ECHO;\n tcsetattr(fileno(ttyfile), TCSAFLUSH, &ios1);\n if (fscanf(ttyfile, \"%50s\", opts.password) < 1) {\n tcsetattr(0, TCSAFLUSH, &ios0);\n ERROR(\"Password invalid\");\n }\n tcsetattr(fileno(ttyfile), TCSAFLUSH, &ios0);\n while (fgetc(ttyfile) != 
'\\n');\n fputc('\\n', stderr);\n }\n password = strdup(opts.password);\n \n char *bigkey = generateBigkey(date);\n char *request = generateRequest(bigkey, date);\n free(email);\n free(password);\n \n fputs(\"Trying to contact server...\\n\", stderr);\n struct MemoryStruct *response = contactServer(request);\n\n if (response->size == 0 || response->memory == NULL) {\n ERROR(\"Server sent an empty response, exiting\");\n }\n fputs(\"Server responded.\\n\", stderr);\n \n // skip initial whitespace\n char *message = response->memory;\n message += strspn(message, \" \\t\\n\");\n \n if (isBase64(message) == 0) {\n if (memcmp(message,\"MessageToBePrintedInDecoder\",27) ==0) {\n fputs(\"Server sent us this sweet message:\\n\", stderr);\n quote(message + 27);\n } else {\n fputs(\"Server sent us this ugly crap:\\n\", stderr);\n dumpHex(response->memory, response->size);\n }\n ERROR(\"Server response is unuseable, exiting\");\n }\n \n int info_len;\n char *info_crypted = base64Decode(message, &info_len);\n \n if (info_len % 8 != 0) {\n fputs(\"Length of response must be a multiple of 8.\", stderr);\n dumpHex(info_crypted, info_len);\n ERROR(\"Server response is unuseable, exiting\");\n }\n \n info = decryptResponse(info_crypted, info_len, bigkey);\n \n keyphrase = queryGetParam(info, \"HP\");\n if (keyphrase == NULL)\n ERROR(\"Response lacks keyphrase\");\n \n if (strlen(keyphrase) != 56)\n ERROR(\"Keyphrase has wrong length\");\n \n fprintf(stderr, \"Keyphrase: %s\\n\", keyphrase);\n keycache_put(queryGetParam(header, \"FH\"), keyphrase);\n \n free(date);\n free(bigkey);\n free(request);\n free(response->memory);\n free(response);\n free(info_crypted);\n}\n\nvoid openFile() {\n if (strcmp(\"-\", filename) == 0)\n file = stdin;\n else\n file = fopen(filename, \"rb\");\n \n if (file == NULL)\n PERROR(\"Error opening file\");\n \n char magic[11] = { 0 };\n if (fread(magic, 10, 1, file) < 1 && !feof(file))\n PERROR(\"Error reading file\");\n if (feof(file))\n ERROR(\"Error: 
unexpected end of file\");\n if (strcmp(magic, \"OTRKEYFILE\") != 0)\n ERROR(\"Wrong file format\");\n \n header = getHeader();\n}\n\ntypedef struct verifyFile_ctx {\n MD5_CTX ctx;\n char hash1[16];\n int input;\n} vfy_t;\n\nvoid verifyFile_init(vfy_t *vfy, int input) {\n char *hash_hex, *hash;\n int i;\n \n memset(vfy, 0, sizeof(*vfy));\n vfy->input = input;\n \n /* get MD5 sum from 'OH' or 'FH' header field */\n hash_hex = queryGetParam(header, vfy->input?\"OH\":\"FH\");\n if (hash_hex == NULL || strlen(hash_hex) != 48)\n ERROR(\"Missing hash in file header / unexpected format\");\n for (i=1; i<16; ++i) {\n hash_hex[2*i] = hash_hex[3*i];\n hash_hex[2*i+1] = hash_hex[3*i+1];\n }\n hash_hex[32] = 0;\n if (opts.verbosity >= VERB_DEBUG)\n fprintf(stderr, \"Checking %s against MD5 sum: %s\\n\",\n vfy->input?\"input\":\"output\", hash_hex);\n hash = hex2bin(hash_hex);\n memcpy(vfy->hash1, hash, 16);\n \n /* calculate MD5 sum of file (without header) */\n memset(&vfy->ctx, 0, sizeof(vfy->ctx));\n MD5_Init(&vfy->ctx);\n \n free(hash_hex);\n free(hash);\n}\n\nvoid verifyFile_data(vfy_t *vfy, char *buffer, size_t len) {\n MD5_Update(&vfy->ctx, buffer, len);\n}\n\nvoid verifyFile_final(vfy_t *vfy) {\n unsigned char md5[16];\n \n MD5_Final(md5, &vfy->ctx);\n if (memcmp(vfy->hash1, md5, 16) != 0) {\n if (vfy->input)\n ERROR(\"Input file had errors. Output may or may not be usable.\");\n else\n ERROR(\"Output verification failed. 
Wrong key?\");\n }\n}\n\nvoid verifyOnly() {\n vfy_t vfy;\n size_t n;\n static char buffer[65536];\n unsigned long long length;\n unsigned long long position;\n\n length = atoll(queryGetParam(header, \"SZ\")) - 522;\n fputs(\"Verifying otrkey...\\n\", stderr);\n verifyFile_init(&vfy, 1);\n for (position = 0; position < length; position += n) {\n showProgress(position, length);\n n = fread(buffer, 1, MIN(length - position, sizeof(buffer)), file);\n if (n == 0 || ferror(file)) break;\n verifyFile_data(&vfy, buffer, n);\n }\n if (position < length) {\n if (!feof(file)) PERROR(\"fread\");\n if (!logfilemode) fputc('\\n', stderr);\n fputs(\"file is too short\\n\", stderr);\n }\n else\n showProgress(1, 0);\n\n if (fread(buffer, 1, 1, file) > 0)\n fputs(\"file contains trailing garbage\\n\", stderr);\n else if (!feof(file))\n PERROR(\"fread\");\n verifyFile_final(&vfy);\n fputs(\"file is OK\\n\", stderr);\n}\n\nvoid decryptFile() {\n int fd;\n char *headerFN;\n struct stat st;\n FILE *destfile;\n\n if (opts.destfile == NULL) {\n headerFN = queryGetParam(header, \"FN\");\n if (opts.destdir != NULL) {\n destfilename = malloc(strlen(opts.destdir) + strlen(headerFN) + 2);\n strcpy(destfilename, opts.destdir);\n strcat(destfilename, \"/\");\n strcat(destfilename, headerFN);\n free(headerFN);\n }\n else {\n destfilename = headerFN;\n }\n }\n else {\n destfilename = strdup(opts.destfile);\n }\n \n if (strcmp(destfilename, \"-\") == 0) {\n if (isatty(1)) ERROR(\"error: cowardly refusing to output to a terminal\");\n fd = 1;\n }\n else\n fd = open(destfilename, O_WRONLY|O_CREAT|O_EXCL, CREAT_MODE);\n if (fd < 0 && errno == EEXIST) {\n if (stat(destfilename, &st) != 0 || S_ISREG(st.st_mode)) {\n if (!interactive) ERROR(\"Destination file exists: %s\", destfilename);\n fprintf(stderr, \"Destination file exists: %s\\nType y to overwrite: \",\n destfilename);\n if (fgetc(ttyfile) != 'y') exit(EXIT_FAILURE);\n while (fgetc(ttyfile) != '\\n');\n fd = open(destfilename, O_WRONLY|O_TRUNC, 
0);\n }\n else\n fd = open(destfilename, O_WRONLY, 0);\n }\n if (fd < 0)\n PERROR(\"Error opening destination file: %s\", destfilename);\n if ((destfile = fdopen(fd, \"wb\")) == NULL)\n PERROR(\"fdopen\");\n \n fputs(\"Decrypting and verifying...\\n\", stderr); // -----------------------\n \n void *key = hex2bin(keyphrase);\n MCRYPT blowfish = mcrypt_module_open(\"blowfish-compat\", NULL, \"ecb\", NULL);\n mcrypt_generic_init(blowfish, key, 28, NULL);\n \n unsigned long long length = atoll(queryGetParam(header, \"SZ\")) - 522;\n unsigned long long position = 0;\n size_t readsize;\n size_t writesize;\n static char buffer[65536];\n vfy_t vfy_in, vfy_out;\n \n verifyFile_init(&vfy_in, 1);\n verifyFile_init(&vfy_out, 0);\n \n while (position < length) {\n showProgress(position, length);\n\n if (length - position >= sizeof(buffer)) {\n readsize = fread(buffer, 1, sizeof(buffer), file);\n } else {\n readsize = fread(buffer, 1, length - position, file);\n }\n if (readsize <= 0) {\n if (feof(file))\n ERROR(\"Input file is too short\");\n PERROR(\"Error reading input file\");\n }\n \n verifyFile_data(&vfy_in, buffer, readsize);\n /* If the payload length is not a multiple of eight,\n * the last few bytes are stored unencrypted */\n mdecrypt_generic(blowfish, buffer, readsize - readsize % 8);\n verifyFile_data(&vfy_out, buffer, readsize);\n \n writesize = fwrite(buffer, 1, readsize, destfile);\n if (writesize != readsize)\n PERROR(\"Error writing to destination file\");\n \n position += writesize;\n }\n showProgress(1, 0);\n\n verifyFile_final(&vfy_in);\n verifyFile_final(&vfy_out);\n fputs(\"OK checksums from header match\\n\", stderr);\n \n mcrypt_generic_deinit(blowfish);\n mcrypt_module_close(blowfish);\n \n if (fclose(destfile) != 0)\n PERROR(\"Error closing destination file.\");\n\n if (opts.unlinkmode) {\n if (strcmp(filename, \"-\") != 0 &&\n stat(filename, &st) == 0 && S_ISREG(st.st_mode) &&\n strcmp(destfilename, \"-\") != 0 &&\n stat(destfilename, &st) == 0 && 
S_ISREG(st.st_mode)) {\n if (unlink(filename) != 0)\n PERROR(\"Cannot delete input file\");\n else\n fputs(\"info: input file has been deleted\\n\", stderr);\n }\n else {\n fputs(\"Warning: Not deleting input file (input or \"\n \"output is not a regular file)\\n\", stderr);\n }\n }\n \n free(key);\n free(destfilename);\n}\n\nvoid processFile() {\n int storeKeyphrase;\n switch (opts.action) {\n case ACTION_INFO:\n // TODO: output something nicer than just the querystring\n dumpQuerystring(header);\n break;\n case ACTION_FETCHKEY:\n fetchKeyphrase();\n break;\n case ACTION_DECRYPT:\n storeKeyphrase = 1;\n if (opts.keyphrase == NULL) {\n storeKeyphrase = 0;\n keyphrase = keycache_get(queryGetParam(header, \"FH\"));\n if (keyphrase)\n fprintf(stderr, \"Keyphrase from cache: %s\\n\", keyphrase);\n else\n fetchKeyphrase();\n }\n else {\n keyphrase = strdup(opts.keyphrase);\n }\n decryptFile();\n if (storeKeyphrase)\n keycache_put(queryGetParam(header, \"FH\"), keyphrase);\n break;\n case ACTION_VERIFY:\n verifyOnly();\n break;\n }\n}\n\nvoid usageError() {\n fputs(\"\\n\"\n \"Usage: otrtool [-h] [-v] [-i|-f|-x|-y] [-u]\\n\"\n \" [-k ] [-e ] [-p ]\\n\"\n \" [-D ] [-O ]\\n\"\n \" [ ... 
[]]\\n\"\n \"\\n\"\n \"MODES OF OPERATION\\n\"\n \" -i | Display information about file (default action)\\n\"\n \" -f | Fetch keyphrase for file\\n\"\n \" -x | Decrypt file\\n\"\n \" -y | Verify only\\n\"\n \"\\n\"\n \"FREQUENTLY USED OPTIONS\\n\"\n \" -k | Do not fetch keyphrase, use this one\\n\"\n \" -D | Output folder\\n\"\n \" -O | Output file (overrides -D)\\n\"\n \" -u | Delete otrkey-files after successful decryption\\n\"\n \"\\n\"\n \"See otrtool(1) for further information\\n\", stderr);\n}\n\nint main(int argc, char *argv[]) {\n fputs(\"OTR-Tool, \" VERSION \"\\n\", stderr);\n\n int i;\n int opt;\n while ( (opt = getopt(argc, argv, \"hvgifxyk:e:p:D:O:u\")) != -1) {\n switch (opt) {\n case 'h':\n usageError();\n exit(EXIT_SUCCESS);\n break;\n case 'v':\n opts.verbosity = VERB_DEBUG;\n break;\n case 'g':\n opts.guimode = 1;\n interactive = 0;\n break;\n case 'i':\n opts.action = ACTION_INFO;\n break;\n case 'f':\n opts.action = ACTION_FETCHKEY;\n break;\n case 'x':\n opts.action = ACTION_DECRYPT;\n break;\n case 'y':\n opts.action = ACTION_VERIFY;\n break;\n case 'k':\n opts.keyphrase = optarg;\n break;\n case 'e':\n opts.email = strdup(optarg);\n memset(optarg, 'x', strlen(optarg));\n break;\n case 'p':\n opts.password = strdup(optarg);\n memset(optarg, 'x', strlen(optarg));\n break;\n case 'D':\n opts.destdir = optarg;\n break;\n case 'O':\n opts.destfile = optarg;\n break;\n case 'u':\n opts.unlinkmode = 1;\n break;\n default:\n usageError();\n exit(EXIT_FAILURE);\n }\n }\n if (opts.verbosity >= VERB_DEBUG) {\n fputs(\"command line: \", stderr);\n for (i = 0; i < argc; ++i) {\n fputs(argv[i], stderr);\n fputc((i == argc - 1) ? 
'\\n' : ' ', stderr);\n }\n }\n \n if (optind >= argc) {\n fprintf(stderr, \"Missing argument: otrkey-file\\n\");\n usageError();\n exit(EXIT_FAILURE);\n }\n if (argc > optind + 1) {\n if (opts.destfile != NULL && strcmp(opts.destfile, \"-\") == 0) {\n i = 0;\n }\n else for (i = optind; i < argc; i++) {\n if (strcmp(argv[i], \"-\") == 0)\n break;\n }\n if (i < argc)\n ERROR(\"Usage error: piping is not possible with multiple input files\");\n }\n\n if (!isatty(2) && opts.guimode == 0) {\n logfilemode = 1;\n interactive = 0;\n }\n if (interactive) {\n if (!isatty(0)) {\n ttyfile = fopen(\"/dev/tty\", \"r\");\n if (ttyfile == NULL) {\n if (opts.verbosity >= VERB_DEBUG) perror(\"open /dev/tty\");\n interactive = 0;\n }\n }\n else ttyfile = stdin;\n }\n\n if (opts.action == ACTION_DECRYPT || opts.action == ACTION_VERIFY) {\n errno = 0;\n nice(10);\n if (errno == 0 && opts.verbosity >= VERB_DEBUG)\n fputs(\"NICE was set to 10\\n\", stderr);\n\n // I am not sure if this really catches all errors\n // If this causes problems, just delete the ionice-stuff\n #ifdef __NR_ioprio_set\n if (syscall(__NR_ioprio_set, 1, getpid(), 7 | 3 << 13) == 0\n && opts.verbosity >= VERB_DEBUG)\n fputs(\"IONICE class was set to Idle\\n\", stderr);\n #endif\n }\n if (opts.action == ACTION_FETCHKEY || opts.action == ACTION_DECRYPT) {\n keycache_open();\n }\n\n for (i = optind; i < argc; i++) {\n filename = argv[i];\n if (argc > optind + 1)\n fprintf(stderr, \"\\n==> %s <==\\n\", filename);\n openFile();\n processFile();\n if (fclose(file) != 0)\n PERROR(\"Error closing file\");\n free(header);\n }\n \n exit(EXIT_SUCCESS);\n}\n"}}
-{"repo": "rhr/ivy", "pr_number": 10, "title": "Cz staging", "state": "closed", "merged_at": "2017-08-15T17:39:52Z", "additions": 291, "deletions": 272, "files_changed": ["ivy/__init__.py", "ivy/ages.py", "ivy/align.py", "ivy/ascii.py", "ivy/autocollapse.py", "ivy/bipart.py", "ivy/chars/__init__.py", "ivy/chars/catpars.py", "ivy/chars/evolve.py", "ivy/chars/mk.py", "ivy/contrasts.py", "ivy/genbank.py", "ivy/interactive.py", "ivy/layout.py", "ivy/ltt.py", "ivy/newick.py", "ivy/nexus.py", "ivy/sequtil.py", "ivy/storage.py", "ivy/tree.py", "ivy/treebase.py", "ivy/vis/alignment.py", "ivy/vis/hardcopy.py", "ivy/vis/symbols.py", "ivy/vis/tree.py"], "files_before": {"ivy/__init__.py": "\"\"\"\nivy - a phylogenetics library and visual shell\nhttp://www.reelab.net/ivy\n\nCopyright 2010 Richard Ree \n\nRequired: ipython, matplotlib, scipy, numpy\nUseful: dendropy, biopython, etc.\n\"\"\"\n## This program is free software; you can redistribute it and/or\n## modify it under the terms of the GNU General Public License as\n## published by the Free Software Foundation; either version 3 of the\n## License, or (at your option) any later version.\n\n## This program is distributed in the hope that it will be useful, but\n## WITHOUT ANY WARRANTY; without even the implied warranty of\n## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n## General Public License for more details.\n\n## You should have received a copy of the GNU General Public License\n## along with this program. 
If not, see\n## .\n\nimport tree, layout, contrasts, ages\nimport bipart, genbank, nexus, newick, storage\n#import nodearray, data\nimport treebase\n#import db\n#import contrib\ntry:\n import ltt as _ltt\n ltt = _ltt.ltt\nexcept ImportError:\n pass\n\nimport chars, align, sequtil\n## try: import vis\n## except RuntimeError: pass\n", "ivy/ages.py": "\"\"\"\nCalculate node ages from branch lengths.\n\nThe function of interest is `ages2lengths`\n\"\"\"\n\ndef ages2lengths(node, node_ages, results={}):\n \"\"\"\n Convert node ages to branch lengths\n\n Args:\n node (Node): Node object\n node_ages (dict): Dict mapping nodes to ages\n Returns:\n dict: mapping of nodes to lengths\n\n \"\"\"\n for d in node.descendants():\n age = node_ages[d]\n if d.parent:\n parent_age = node_ages[d.parent]\n results[d] = parent_age - age\n return results\n\ndef min_ages(node, leaf_ages, results={}):\n \"\"\"\n Calculate minimum ages given fixed ages in leaf_ages\n\n Args:\n node (Node): A node object\n leaf_ages (dict): A dict mapping leaf nodes to ages\n Returns:\n dict: mapping of nodes to ages\n \"\"\"\n v = []\n for child in node.children:\n if child.label and (child.label in leaf_ages):\n age = leaf_ages[child.label]\n v.append(age)\n results[child] = age\n else:\n min_ages(child, leaf_ages, results)\n age = results[child]\n v.append(age)\n results[node] = max(v)\n return results\n\ndef smooth(node, node_ages, results={}):\n \"\"\"\n adjust ages of internal nodes by smoothing\n RR: I don't actually know what this function does -CZ\n \"\"\"\n if node.parent:\n parent_age = node_ages[node.parent]\n if node.children:\n max_child_age = max([ node_ages[child] for child in node.children ])\n # make the new age the average of parent and max child\n new_node_age = (parent_age + max_child_age)/2.0\n results[node] = new_node_age\n else:\n results[node] = node_ages[node]\n else:\n results[node] = node_ages[node]\n for child in node.children:\n smooth(child, node_ages, results)\n return 
results\n\nif __name__ == \"__main__\":\n import newick, ascii\n\n s = \"((((a,b),(c,d),(e,f)),g),h);\"\n root = newick.parse(s)\n\n leaf_ages = {\n \"a\": 3,\n \"b\": 2,\n \"c\": 4,\n \"d\": 1,\n \"e\": 3,\n \"f\": 0.5,\n \"g\": 10,\n \"h\": 5,\n }\n\n ma = min_ages(root, leaf_ages)\n d = ma\n for i in range(10):\n d = smooth(root, d)\n for node, val in ages2lengths(root, d).items():\n node.length = val\n print ascii.render(root, scaled=1)\n", "ivy/align.py": "import os\nfrom subprocess import Popen, PIPE\nfrom Bio import AlignIO\nfrom Bio.Alphabet import IUPAC\nfrom cStringIO import StringIO\nfrom tempfile import NamedTemporaryFile\n\nMUSCLE = \"/usr/bin/muscle\"\n\ndef muscle(seqs, cmd=None):\n if not cmd: cmd = MUSCLE\n assert os.path.exists(cmd)\n p = Popen([cmd], stdin=PIPE, stdout=PIPE)\n write = p.stdin.write\n for x in seqs:\n write(\">%s\\n%s\\n\" % (x.id, x.seq))\n out = p.communicate()[0]\n aln = AlignIO.read(StringIO(out), 'fasta', alphabet=IUPAC.ambiguous_dna)\n return aln\n\ndef musclep(seqs1, seqs2, cmd=\"/usr/bin/muscle\"):\n assert os.path.exists(cmd)\n f1 = NamedTemporaryFile(); f2 = NamedTemporaryFile()\n for s, f in ((seqs1, f1), (seqs2, f2)):\n write = f.file.write\n for x in s: write(\">%s\\n%s\\n\" % (x.id, x.seq))\n f1.file.flush(); f2.file.flush()\n cmd += \" -profile -in1 %s -in2 %s\" % (f1.name, f2.name)\n p = Popen(cmd.split(), stdout=PIPE)\n out = p.communicate()[0]\n aln = AlignIO.read(StringIO(out), 'fasta', alphabet=IUPAC.ambiguous_dna)\n f1.file.close(); f2.file.close()\n return aln\n \ndef read(data, format=None, name=None):\n from types import StringTypes\n \n def strip(s):\n fname = os.path.split(s)[-1]\n head, tail = os.path.splitext(fname)\n tail = tail.lower()\n if tail in (\".fasta\", \".nex\", \".nexus\"):\n return head\n else:\n return fname\n\n if (not format):\n if (type(data) in StringTypes) and os.path.isfile(data):\n s = data.lower()\n if s.endswith(\"fasta\"):\n format=\"fasta\"\n for tail in \".nex\", \".nexus\":\n 
if s.endswith(tail):\n format=\"nexus\"\n break\n\n if (not format):\n format = \"fasta\"\n\n if type(data) in StringTypes:\n if os.path.isfile(data):\n name = strip(data)\n with open(data) as f:\n return AlignIO.read(f, format, alphabet=IUPAC.ambiguous_dna)\n else:\n f = StringIO(data)\n return AlignIO.read(f, format, alphabet=IUPAC.ambiguous_dna)\n\n elif (hasattr(data, \"tell\") and hasattr(data, \"read\")):\n treename = strip(getattr(data, \"name\", None))\n return AlignIO.read(data, format, alphabet=IUPAC.ambiguous_dna)\n\n raise IOError, \"unable to read alignment from '%s'\" % data\n\ndef write(data, f, format='fasta'):\n AlignIO.write(data, f, format)\n \ndef find(aln, substr):\n \"\"\"\n generator that yields (seqnum, pos) tuples for every position of\n ``subseq`` in `aln`\n \"\"\"\n from sequtil import finditer\n N = len(substr)\n for i, rec in enumerate(aln):\n for j in finditer(rec.seq, substr):\n yield (i,j)\n \ndef find_id(aln, regexp):\n import re\n return [ (i,s) for i, s in enumerate(aln) if re.search(regexp, s.id) ]\n \ndef gapcols(aln, c='-'):\n from numpy import array\n a = array([ list(x.seq) for x in aln ])\n for i, col in enumerate(a.T):\n s = set(col==c)\n if len(s)==1 and True in s:\n yield i\n", "ivy/ascii.py": "from array import array\nfrom layout import depth_length_preorder_traversal\n\nclass AsciiBuffer:\n def __init__(self, width, height):\n self.width = width\n self.height = height\n self._b = [ array('c', ' '*width) for line in range(height) ]\n\n def putstr(self, r, c, s):\n assert r < self.height\n assert c+len(s) <= self.width, \"%s %s %s '%s'\" % (self.width, r, c, s)\n self._b[r][c:c+len(s)] = array('c', s)\n\n def __str__(self):\n return \"\\n\".join([ b.tostring() for b in self._b ])\n\ndef sum_to_root(node, internodes=True, length=False):\n \"\"\"\n Number of branches from node to root.\n\n Args:\n node (Node): A Node object\n RR: Do internodes and length do anything in this function? 
-CZ\n Returns:\n int: The number of branches from node to root.\n \"\"\"\n i = 0\n n = node\n while 1:\n if not n.parent:\n break\n else:\n n = n.parent\n i += 1\n return i\n\n## def depth_length_preorder_traversal(node):\n## if not node.parent:\n## node.depth = 0\n## node.length_to_root = 0.0\n## else:\n## p = node.parent\n## node.depth = p.depth + 1\n## node.length_to_root = p.length_to_root + (node.length or 0.0)\n## for ch in node.children:\n## depth_length_preorder_traversal(ch)\n\ndef smooth_cpos(node, n2c):\n for ch in node.children:\n smooth_cpos(ch, n2c)\n\n if node.parent and not node.isleaf:\n px = n2c[node.parent].c\n cx = min([ n2c[ch].c for ch in node.children ])\n dxp = n2c[node].c - px\n cxp = cx - n2c[node].c\n node.c = int(px + (cx - px)*0.5)\n\ndef scale_cpos(node, n2c, scalef, root_offset):\n if node.parent:\n n2c[node].c = n2c[node.parent].c + int(node.length * scalef)\n else:\n n2c[node].c = root_offset\n\n for ch in node.children:\n scale_cpos(ch, n2c, scalef, root_offset)\n\ndef set_rpos(node, n2c):\n for child in node.children:\n set_rpos(child, n2c)\n nc = n2c[node]\n if node.children:\n children = node.children\n c0 = n2c[children[0]]\n c1 = n2c[children[-1]]\n rmin = c0.r; rmax = c1.r\n nc.r = int(rmin + (rmax-rmin)/2.0)\n\ndef render(root, unitlen=3, minwidth=50, maxwidth=None, scaled=False,\n show_internal_labels=True):\n \"\"\"\n Create the ascii tree to be shown with print()\n \"\"\"\n n2c = depth_length_preorder_traversal(root)\n leaves = root.leaves(); nleaves = len(leaves)\n maxdepth = max([ n2c[lf].depth for lf in leaves ])\n max_labelwidth = max([ len(lf.label) for lf in leaves ]) + 1\n\n root_offset = 0\n if root.label and show_internal_labels:\n root_offset = len(root.label)\n\n width = maxdepth*unitlen + max_labelwidth + 2 + root_offset\n height = 2*nleaves - 1\n\n if width < minwidth:\n unitlen = (minwidth - max_labelwidth - 2 - root_offset)/maxdepth\n width = maxdepth*unitlen + max_labelwidth + 2 + root_offset\n\n buf = 
AsciiBuffer(width, height)\n\n for i, lf in enumerate(leaves):\n c = n2c[lf]\n c.c = width - max_labelwidth - 2\n c.r = i*2\n\n for node in root.postiter():\n nc = n2c[node]\n if node.children:\n children = node.children\n c0 = n2c[children[0]]\n c1 = n2c[children[-1]]\n rmin = c0.r; rmax = c1.r\n nc.r = int(rmin + (rmax-rmin)/2.0)\n nc.c = min([ n2c[ch].c for ch in children ]) - unitlen\n\n if not scaled:\n smooth_cpos(root, n2c)\n else:\n maxlen = max([ n2c[lf].length_to_root for lf in leaves ])\n scalef = (n2c[leaves[0]].c + 1 - root_offset)/maxlen\n scale_cpos(root, n2c, scalef, root_offset)\n\n for node in root.postiter():\n nc = n2c[node]\n if node.parent:\n pc = n2c[node.parent]\n for r in range(min([nc.r, pc.r]),\n max([nc.r, pc.r])):\n buf.putstr(r, pc.c, \":\")\n\n sym = getattr(nc, \"hchar\", \"-\")\n vbar = sym*(nc.c-pc.c)\n buf.putstr(nc.r, pc.c, vbar)\n\n if node.isleaf:\n buf.putstr(nc.r, nc.c+1, \" \"+node.label)\n else:\n if node.label and show_internal_labels:\n buf.putstr(nc.r, nc.c-len(node.label), node.label)\n\n buf.putstr(nc.r, nc.c, \"+\")\n\n return str(buf)\n\nif __name__ == \"__main__\":\n import random, tree\n rand = random.Random()\n\n t = tree.read(\n \"(foo,((bar,(dog,cat)dc)dcb,(shoe,(fly,(cow, bowwow)cowb)cbf)X)Y)Z;\"\n )\n\n #t = tree.read(\"(((foo:4.6):5.6, (bar:6.5, baz:2.3):3.0):3.0);\")\n #t = tree.read(\"(foo:4.6, (bar:6.5, baz:2.3)X:3.0)Y:3.0;\")\n\n i = 1\n print render(t, scaled=0, show_internal_labels=1)\n r = t.get(\"cat\").parent\n tree.reroot(t, r)\n tp = t.parent\n tp.remove_child(t)\n c = t.children[0]\n t.remove_child(c)\n tp.add_child(c)\n print render(r, scaled=0, show_internal_labels=1)\n", "ivy/autocollapse.py": "\"\"\"\nFor drawing big trees. 
Calculate which clades can be 'collapsed' and\ndisplayed with a placeholder.\n\nTODO: test and develop this module further\n\"\"\"\nfrom storage import Storage\n\ndef autocollapse_info(node, collapsed, visible=True, info={}):\n \"\"\"\n gather information to determine if a node should be collapsed\n\n *collapsed* is a set containing nodes that are already collapsed\n \"\"\"\n if node not in info:\n s = Storage()\n info[node] = s\n else:\n s = info[node]\n \n if visible and (node in collapsed):\n visible = False\n \n nnodes = 1 # total number of nodes, including node\n # number of visible leaves\n nvisible = int((visible and node.isleaf) or (node in collapsed))\n ntips = int(node.isleaf)\n ntips_visible = int(node.isleaf and visible)\n s.has_labeled_descendant = False\n s.depth = 1\n\n for child in node.children:\n autocollapse_info(child, collapsed, visible, info)\n cs = info[child]\n nnodes += cs.nnodes\n nvisible += cs.nvisible\n ntips += cs.ntips\n ntips_visible += cs.ntips_visible\n if (child.label and (not child.isleaf)) \\\n or (cs.has_labeled_descendant):\n s.has_labeled_descendant = True\n if cs.depth >= s.depth:\n s.depth = cs.depth+1\n s.nnodes = nnodes\n s.nvisible = nvisible\n s.ntips = ntips\n s.ntips_visible = ntips_visible\n return info\n\ndef autocollapse(root, collapsed=None, keep_visible=None, max_visible=1000):\n \"\"\"\n traverse a tree and find nodes that should be collapsed in order\n to satify *max_visible*\n\n *collapsed* is a set object for storing collapsed nodes\n\n *keep_visible* is a set object of nodes that should not be placed\n in *collapsed*\n \"\"\"\n collapsed = collapsed or set()\n keep_visible = keep_visible or set()\n ntries = 0\n while True:\n if ntries > 10:\n return\n info = autocollapse_info(root, collapsed)\n nvisible = info[root].nvisible\n if nvisible <= max_visible:\n return\n \n v = []\n for node in root.iternodes():\n s = info[node]\n if (node.label and (not node.isleaf) and node.parent and\n (node not in 
keep_visible)):\n w = s.nvisible/float(s.depth)\n if s.has_labeled_descendant:\n w *= 0.25\n v.append((w, node, s))\n v.sort(); v.reverse()\n for w, node, s in v:\n if node not in keep_visible and s.nvisible < (nvisible-1):\n print node\n collapsed.add(node)\n nvisible -= s.nvisible\n if nvisible <= max_visible:\n break\n ntries += 1\n return collapsed\n", "ivy/bipart.py": "import sys\nfrom pprint import pprint\nfrom glob import glob\nfrom storage import Storage\nfrom collections import defaultdict\n\n## class BipartSet(object):\n## \"A set of bipartitions\"\n## def __init__(self, elements):\n## self.elements = frozenset(elements)\n## self.ref = sorted(elements)[0]\n## self.node2bipart = Storage()\n\n## def add(self, subset, node):\n## # filter out elements of subset not in 'elements'\n## subset = (frozenset(subset) & self.elements)\n## if self.ref not in self.subset:\n## self.subset = self.elements - self.subset\n\nclass Bipart(object):\n \"\"\"\n A class representing a bipartition.\n \"\"\"\n def __init__(self, elements, subset, node=None, support=None):\n \"\"\"\n 'elements' and 'subset' are set objects\n \"\"\"\n self.subset = subset\n self.compute(elements)\n self.node = node\n self.support = support\n\n def __hash__(self):\n return self._hash\n\n def __eq__(self, other):\n assert self.elements == other.elements\n return ((self.subset == other.subset) or\n (self.subset == (self.elements - other.subset)))\n\n def __repr__(self):\n v = sorted(self.subset)\n return \"(%s)\" % \" \".join(map(str, v))\n\n def compute(self, elements):\n self.elements = frozenset(elements)\n self.ref = sorted(elements)[0]\n # filter out elements of subset not in 'elements'\n self.subset = (frozenset(self.subset) & self.elements)\n self._hash = hash(self.subset)\n if self.ref not in self.subset:\n self.subset = self.elements - self.subset\n self.complement = self.elements - self.subset\n\n def iscompatible(self, other):\n ## assert self.elements == other.elements\n if 
(self.subset.issubset(other.subset) or\n other.subset.issubset(self.subset)):\n return True\n if (((self.subset | other.subset) == self.elements) or\n (not (self.subset & other.subset))):\n return True\n return False\n\ndef conflict(bp1, bp2, support=None):\n if ((support and (bp1.support >= support) and (bp2.support >= support))\n or (not support)):\n if not bp1.iscompatible(bp2):\n return True\n return False\n\nclass TreeSet:\n def __init__(self, root, elements=None):\n self.root = root\n self.node2labels = root.leafsets(labels=True)\n self.elements = elements or self.node2labels.pop(root)\n self.biparts = [ Bipart(self.elements, v, node=k,\n support=int(k.label or 0))\n for k, v in self.node2labels.items() ]\n\ndef compare_trees(r1, r2, support=None):\n e = (set([ x.label for x in r1.leaves() ]) &\n set([ x.label for x in r2.leaves() ]))\n bp1 = [ Bipart(e, v, node=k, support=int(k.label or 0))\n for k, v in r1.leafsets(labels=True).items() ]\n bp2 = [ Bipart(e, v, node=k, support=int(k.label or 0))\n for k, v in r2.leafsets(labels=True).items() ]\n return compare(bp1, bp2, support)\n\ndef compare(set1, set2, support=None):\n hits1 = []; hits2 = []\n conflicts1 = defaultdict(set); conflicts2 = defaultdict(set)\n for bp1 in set1:\n for bp2 in set2:\n if bp1 == bp2:\n hits1.append(bp1.node); hits2.append(bp2.node)\n if conflict(bp1, bp2, support):\n conflicts1[bp1.node].add(bp2.node)\n conflicts2[bp2.node].add(bp1.node)\n return hits1, hits2, conflicts1, conflicts2\n \n## a = Bipart(\"abcdef\", \"abc\")\n## b = Bipart(\"abcdef\", \"def\")\n## c = Bipart(\"abcdef\", \"ab\")\n## d = Bipart(\"abcdef\", \"cd\")\n## print a == b\n## print a.iscompatible(b)\n## print a.iscompatible(c)\n## print a.iscompatible(d)\n## print c.iscompatible(d)\n## sys.exit() \n", "ivy/chars/__init__.py": "import mk, catpars, evolve\n", "ivy/chars/catpars.py": "import scipy, numpy\n\ndef default_costmatrix(numstates, dtype=numpy.int):\n \"a square array with zeroes along the diagonal, ones 
elsewhere\"\n return scipy.logical_not(scipy.identity(numstates)).astype(float)\n\ndef minstates(v):\n \"return the indices of v that equal the minimum\"\n return scipy.nonzero(scipy.equal(v, min(v)))\n\ndef downpass(node, states, stepmatrix, chardata, node2dpv=None):\n if node2dpv is None:\n node2dpv = {}\n \n if not node.isleaf:\n for child in node.children:\n downpass(child, states, stepmatrix, chardata, node2dpv)\n\n dpv = scipy.zeros([len(states)])\n node2dpv[node] = dpv\n for i in states:\n for child in node.children:\n child_dpv = node2dpv[child]\n mincost = min([ child_dpv[j] + stepmatrix[i,j] \\\n for j in states ])\n dpv[i] += mincost\n \n #print node.label, node.dpv\n\n else:\n #print node.label, chardata[node.label]\n node2dpv[node] = stepmatrix[:,chardata[node.label]]\n\n return node2dpv\n \n\ndef uppass(node, states, stepmatrix, node2dpv, node2upm={},\n node2ancstates=None):\n parent = node.parent\n if not node.isleaf:\n if parent is None: # root\n dpv = node2dpv[node]\n upm = None\n node.mincost = min(dpv)\n node2ancstates = {node: minstates(dpv)}\n \n else:\n M = scipy.zeros(stepmatrix.shape)\n for i in states:\n sibs = [ c for c in parent.children if c is not node ]\n for j in states:\n c = 0\n for sib in sibs:\n sibdpv = node2dpv[sib]\n c += min([ sibdpv[x] + stepmatrix[j,x]\n for x in states ])\n c += stepmatrix[j,i]\n\n p_upm = node2upm.get(parent)\n if p_upm is not None:\n c += min(p_upm[j])\n\n M[i,j] += c\n \n node2upm[node] = M\n\n v = node2dpv[node][:]\n for s in states:\n v[s] += min(M[s])\n node2ancstates[node] = minstates(v)\n\n for child in node.children:\n uppass(child, states, stepmatrix, node2dpv, node2upm,\n node2ancstates)\n\n return node2ancstates\n \ndef ancstates(tree, chardata, stepmatrix):\n states = range(len(stepmatrix))\n return uppass(tree, states, stepmatrix,\n downpass(tree, states, stepmatrix, chardata))\n\ndef _bindeltran(node, stepmatrix, node2dpv, node2deltr=None, ancstate=None):\n if node2deltr is None:\n node2deltr 
= {}\n\n dpv = node2dpv[node]\n if ancstate is not None:\n c, s = min([ (cost+stepmatrix[ancstate,i], i) \\\n for i, cost in enumerate(dpv) ])\n else:\n c, s = min([ (cost, i) for i, cost in enumerate(dpv) ])\n \n node2deltr[node] = s\n for child in node.children:\n _bindeltran(child, stepmatrix, node2dpv, node2deltr, s)\n\n return node2deltr\n \ndef binary_deltran(tree, chardata, stepmatrix):\n states = range(len(stepmatrix))\n node2dpv = downpass(tree, states, stepmatrix, chardata)\n node2deltr = _bindeltran(tree, stepmatrix, node2dpv)\n return node2deltr\n \n\nif __name__ == \"__main__\":\n from pprint import pprint\n from ivy import tree\n root = tree.read(\"(a,((b,c),(d,(e,f))));\")\n\n nstates = 4\n states = range(nstates)\n cm = default_costmatrix(nstates)\n chardata = dict(zip(\"abcdef\", map(int, \"000233\")))\n dp = downpass(root, states, cm, chardata)\n\n for i, node in enumerate(root):\n if not node.label:\n node.label = \"N%s\" % i\n else:\n node.label = \"%s (%s)\" % (node.label, chardata[node.label])\n\n print ascii.render(root)\n \n\n## nstates = 2\n## leaves = tree.leaves() \n## for leaf in leaves:\n## leaf.anc_cost_vector = chardata[leaf.label]\n\n pprint(\n #ancstates(root, chardata, cm)\n #uppass(root, states, cm, downpass(tree, states, cm, chardata))\n dp\n )\n\n\n", "ivy/chars/evolve.py": "#!/usr/bin/env python\n\"\"\"\nFunctions for evolving traits and trees.\n\"\"\"\ndef brownian(root, sigma=1.0, init=0.0, values={}):\n \"\"\"\n Recursively evolve a trait by Brownian motion up from the node\n *root*.\n\n * *sigma*: standard deviation of the normal random variate after\n one unit of branch length\n\n * *init*: initial value\n\n Returns: *values* - a dictionary mapping nodes to evolved values\n \"\"\"\n from scipy.stats import norm\n values[root] = init\n for child in root.children:\n time = child.length\n random_step = norm.rvs(init, scale=sigma*time)\n brownian(child, sigma, random_step, values)\n return values\n\ndef test_brownian():\n 
class Q:
    def __init__(self, k=2, layout=None):
        """
        Represents a square transition matrix with k states.

        'layout' is a square (k,k) array of integers that index free
        rate parameters (values on the diagonal are ignored).  Cells
        with value 0 will have the first rate parameter, 1 the
        second, etc.
        """
        self.k = k
        self.range = range(k)
        # 1 everywhere off the diagonal, 0 on the diagonal.  The builtin
        # `int` is used as dtype: `numpy.int` was removed in numpy 1.24,
        # and scipy's `array`/`zeros` re-exports are gone from modern
        # scipy, so numpy is used directly here.
        self.offdiag = numpy.array(numpy.eye(k) == 0, dtype=int)
        if layout is None:
            layout = numpy.zeros((k, k), int)
        # zero the diagonal of the layout so it never indexes a rate
        self.layout = layout * self.offdiag

    def fill(self, rates):
        """Build the rate matrix from a vector of rate parameters.

        Off-diagonal cell (i,j) takes rates[layout[i,j]]; each diagonal
        cell is set to minus its row sum so rows sum to zero.
        """
        m = numpy.take(rates, self.layout) * self.offdiag
        v = m.sum(1) * -1
        for i in self.range:
            m[i, i] = v[i]
        return m

    def default_priors(self):
        """Flat prior over the k states: 1/k each."""
        p = 1.0 / self.k
        return [p] * self.k
def simulate_on_branch(states, si, sj, Q, brlen, condition_on_success):
    """Simulate one stochastic character history along a branch.

    Draws a sequence of (state, time) events from rate matrix Q along a
    branch of length `brlen`, starting in state `si`.  When `si != sj`,
    the first change time is drawn conditional on at least one change
    occurring (appendix of Nielsen 2001, Genetics).  Returns the event
    history -- a list of (state, time) tuples ending at `brlen` -- if
    the simulation ends in `sj` or `condition_on_success` is false;
    otherwise returns None so the caller can reject and retry.

    NOTE(review): relies on module-level `uniform`, `expovariate`,
    `log`, `exp`, and `draw_new_state`; assumes Q[si,si] < 0 so that
    `lambd` is a positive rate -- confirm for degenerate matrices.
    """
    point = 0.0
    history = [(si, point)]
    if si != sj: # condition on one change occurring
        lambd = -(Q[si,si])
        U = uniform(0.0, 1.0)
        # see appendix of Nielsen 2001, Genetics
        t = brlen - point
        # inverse-CDF draw of the first change time, truncated to (0, t)
        newpoint = -(1.0/lambd) * log(1.0 - U*(1.0 - exp(-lambd * t)))
        newstate = draw_new_state(states, Q, si)
        history.append((newstate, newpoint))
        si = newstate; point = newpoint
    while 1:
        # waiting time to the next event: exponential with rate -Q[si,si]
        lambd = -(Q[si,si])
        rv = expovariate(lambd)
        newpoint = point + rv

        if newpoint <= brlen: # state change along branch
            newstate = draw_new_state(states, Q, si)
            history.append((newstate, newpoint))
            si = newstate; point = newpoint
        else:
            # next event falls beyond the branch end: close the history
            history.append((si, brlen))
            break

    if si == sj or (not condition_on_success): # success
        return history

    return None
in states if x != si ]\n uni = uniform(0.0, 1.0)\n val = 0.0\n for sj, prob in qij_probs:\n val += prob\n if uni < val:\n return sj\n \ndef sample_ancstates(node, states, conditionals, n2p, fixed={}):\n \"\"\"\n Sample ancestral states from their conditional likelihoods\n \"\"\"\n ancstates = {}\n for n in node.preiter():\n if n in fixed:\n state = fixed[n]\n else:\n cond = conditionals[n]\n\n if n.parent:\n P = n2p[n]\n ancst = ancstates[n.parent]\n newstate_Prow = P[ancst]\n cond *= newstate_Prow\n\n cond /= sum(cond)\n\n rv = uniform(0.0, 1.0)\n v = 0.0\n for state, c in zip(states, cond):\n v += c\n if rv < v:\n break\n ancstates[n] = state\n\n return ancstates\n", "ivy/contrasts.py": "\"\"\"\nCalculate independent contrasts\n\nTODO: include utilities for transforming data, etc.\n\"\"\"\ndef PIC(node, data, results={}):\n \"\"\"\n Phylogenetic independent contrasts.\n\n Recursively calculate independent contrasts of a bifurcating node\n given a dictionary of trait values.\n\n Args:\n node (Node): A node object\n data (dict): Mapping of leaf names to character values\n\n Returns:\n dict: Mapping of internal nodes to tuples containing ancestral\n state, its variance (error), the contrast, and the\n contrasts's variance.\n\n TODO: modify to accommodate polytomies.\n \"\"\"\n X = []; v = []\n for child in node.children:\n if child.children:\n PIC(child, data, results)\n child_results = results[child]\n X.append(child_results[0])\n v.append(child_results[1])\n else:\n X.append(data[child.label])\n v.append(child.length)\n\n Xi, Xj = X # Xi - Xj is the contrast value\n vi, vj = v\n\n # Xk is the reconstructed state at the node\n Xk = ((1.0/vi)*Xi + (1/vj)*Xj) / (1.0/vi + 1.0/vj)\n\n # vk is the variance\n vk = node.length + (vi*vj)/(vi+vj)\n\n results[node] = (Xk, vk, Xi-Xj, vi+vj)\n\n return results\n\nif __name__ == \"__main__\":\n import tree\n n = tree.read(\n \"((((Homo:0.21,Pongo:0.21)N1:0.28,Macaca:0.49)N2:0.13,\"\\\n 
def extract_gbac(s):
    """
    Extract genbank accession numbers from a text string.

    Args:
        s (str): text string of genbank file
    Returns:
        list: Accession number(s)

    Bug fix: the old code called ``gbac_re.findall(s, re.M)``; the
    second positional argument of ``Pattern.findall`` is the *start
    position*, not flags, so ``re.M`` (== 8) silently skipped the
    first 8 characters of ``s``.  Flags belong in ``re.compile`` (and
    MULTILINE has no effect on this pattern anyway).
    """
    gbac_re = re.compile(r'[A-Z]{1,2}[0-9]{4,7}')
    return gbac_re.findall(s)
    # RR: This also returns various other strings that match the pattern (eg.
    # protein ids)
def ac2gi(ac):
    """Resolve a GenBank accession number to its GI identifier.

    Performs a network query against NCBI Entrez (esearch on the
    nucleotide database) and returns the first matching id string.
    The module-level `email` must be set beforehand (NCBI requirement).

    NOTE(review): if the search returns no hits the ``[0]`` index
    raises IndexError and `h` is never closed -- confirm whether
    callers depend on that behavior.
    """
    global email
    assert email, "set email!"
    Entrez.email = email
    h = Entrez.esearch(db="nucleotide", term=ac, retmax=1)
    d = Entrez.read(h)['IdList'][0]
    h.close()
    return d
def organism_id(s):
    """Build an identifier of the form ``<Organism_name>_<accession>``.

    The record's 'organism' annotation (empty string when missing) has
    dots stripped and spaces replaced by underscores; the record id is
    truncated at its first dot, dropping any version suffix.
    """
    organism = s.annotations.get('organism') or ''
    organism = organism.replace('.', '').replace(' ', '_')
    accession = s.id.split('.')[0]
    return '%s_%s' % (organism, accession)
def blast_closest(fasta, e=10):
    """BLAST a fasta query against NCBI 'nr' and return the top hit.

    Runs a remote blastn search (network call, can be slow) with
    hitlist_size=1, then scrapes the best description line for a GI
    number and a GenBank accession.

    Args:
        fasta: query sequence in fasta format (passed to NCBIWWW.qblast)
        e: expectation-value cutoff
    Returns:
        Storage with optional attributes `gi` (int) and `ac` (str,
        version suffix stripped), plus `title` (the last |-delimited
        field of the hit description).

    NOTE(review): raises IndexError when the search returns no
    descriptions -- confirm callers expect that rather than None.
    """
    f = NCBIWWW.qblast("blastn", "nr", fasta, expect=e, hitlist_size=1)
    rec = NCBIXML.read(f)
    d = rec.descriptions[0]
    result = Storage()
    gi = re.findall(r'gi[|]([0-9]+)', d.title) or None
    if gi: result.gi = int(gi[0])
    ac = re.findall(r'gb[|]([^|]+)', d.title) or None
    if ac: result.ac = ac[0].split(".")[0]
    result.title = d.title.split("|")[-1].strip()
    return result
def start_codons(seq):
    """Yield each index at which the codon 'ATG' occurs in seq.

    Each search resumes three bases past the previous hit, so
    overlapping occurrences closer than one codon apart are skipped.
    """
    pos = seq.find('ATG')
    while pos >= 0:
        yield pos
        pos = seq.find('ATG', pos + 3)
def seqrec_taxid(seqrec):
    """Extract the NCBI taxon id from a sequence record.

    Scans the record's features for the 'source' feature and returns
    the integer from its 'taxon:<id>' db_xref qualifier.  Returns None
    when there is no source feature, no db_xref/taxon qualifier, or
    the taxon value is malformed.

    (The old version fell through to the *last* feature when no
    'source' feature existed -- inspecting the wrong feature -- and
    masked the empty-features NameError with a bare ``except``.)
    """
    source = None
    for ft in seqrec.features:
        if ft.type == 'source':
            source = ft
            break
    if source is None:
        return None
    for x in source.qualifiers.get('db_xref', ()):
        if x.startswith('taxon:'):
            # tolerate a malformed id rather than crashing, matching
            # the old best-effort behavior
            try:
                return int(x.split(':')[1])
            except ValueError:
                return None
    return None
def __node_completer(self, event):
    """IPython tab-completion hook for indexing ivy tree nodes.

    When the line being completed looks like ``obj[`` or ``obj['...``
    and ``obj`` resolves to an ivy.tree.Node, offer the labels of the
    node's labeled descendants as completions (or a quote character if
    none has been typed yet).

    NOTE(review): the final raise references `IPython`, but the only
    ``import IPython`` in this module is commented out -- confirm the
    name is provided by the IPython environment at runtime, otherwise
    that line is a NameError.
    """
    symbol = event.symbol
    s = event.line
    # strip the partially-typed symbol off the end of the line
    if symbol:
        s = s[:-len(symbol)]
    quote = ""
    if s and s[-1] in ["'", '"']:
        quote = s[-1]
        s = s[:-1]
    # extract the expression being indexed, e.g. "x.y[" -> "x.y"
    #base = (re.findall(r'(\w+)\[\Z', s) or [None])[-1]
    base = "".join((re.findall(r'(\w+\.\w*)?(\.)?(\w+)\[\Z', s) or [""])[-1])

    obj = None
    if base:
        obj = self._ofind(base).get("obj")
    if obj and isinstance(obj, ivy.tree.Node):
        completions = ["'"]
        if quote:
            # inside quotes: complete on the node labels themselves
            completions = sorted([ x.label for x in obj.labeled() ])
        return completions

    raise IPython.core.error.TryNext()
sys.stderr.write(\"Magic commands and completers requires IPython >= 0.11\\n\")\n\n## if __name__ == \"__main__\":\n## if len(sys.argv) > 1:\n## for fname in sys.argv[1:]:\n## if os.path.isfile(fname):\n## execfile(fname)\n", "ivy/layout.py": "\"\"\"\nlayout nodes in 2d space\n\nThe function of interest is `calc_node_positions` (aka nodepos)\n\"\"\"\nimport numpy\n\nclass Coordinates:\n \"\"\"\n Coordinates class for storing xy coordinates\n \"\"\"\n def __init__(self, x=0, y=0):\n self.x = x\n self.y = y\n\n def __repr__(self):\n return \"Coordinates(%g, %g)\" % (self.x, self.y)\n\n def point(self):\n return (self.x, self.y)\n\ndef smooth_xpos(node, n2coords):\n \"\"\"\n RR: What does smoothing do? -CZ\n \"\"\"\n if not node.isleaf:\n children = node.children\n for ch in children:\n smooth_xpos(ch, n2coords)\n\n if node.parent:\n px = n2coords[node.parent].x\n cx = min([ n2coords[ch].x for ch in children ])\n n2coords[node].x = (px + cx)/2.0\n\ndef depth_length_preorder_traversal(node, n2coords=None, isroot=False):\n \"\"\"\n Calculate node depth (root = depth 0) and length to root\n\n Args:\n node (Node): A node object\n\n Returns:\n dict: Mapping of nodes to coordinate objects. 
def calc_node_positions(node, width, height,
                        lpad=0, rpad=0, tpad=0, bpad=0,
                        scaled=True, smooth=True, n2coords=None):
    """
    Calculate where nodes should be positioned in 2d space for drawing a tree

    Args:
        node (Node): A (root) node
        width (float): The width of the canvas
        height (float): The height of the canvas
        lpad, rpad, tpad, bpad (float): Padding on the edges of the canvas.
            Optional, defaults to 0.
        scaled (bool): Whether or not the tree is scaled. Optional, defaults to
            True.
        smooth (bool): Whether or not to smooth the tree. Optional, defaults to
            True.
    Returns:
        dict: Mapping of nodes to Coordinates object
    Notes:
        Origin is at upper left
    """
    # shrink the drawable area by the requested padding
    width -= (lpad + rpad)
    height -= (tpad + bpad)

    if n2coords is None:
        n2coords = {}
    # fills in .depth and .length_to_root on each node's Coordinates
    depth_length_preorder_traversal(node, n2coords=n2coords)
    leaves = node.leaves()
    nleaves = len(leaves)
    maxdepth = max([ n2coords[lf].depth for lf in leaves ])
    unitwidth = width/float(maxdepth)
    # NOTE(review): a single-leaf tree makes this a division by zero
    unitheight = height/(nleaves-1.0)

    xoff = (unitwidth * 0.5)
    yoff = (unitheight * 0.5)

    if scaled:
        # map branch-length distance to canvas x via the deepest leaf
        maxlen = max([ n2coords[lf].length_to_root for lf in leaves ])
        scale = width/maxlen

    # leaves: evenly spaced rows; x from depth (unscaled) or distance
    for i, lf in enumerate(leaves):
        c = n2coords[lf]
        c.y = i * unitheight
        if not scaled:
            c.x = width
        else:
            c.x = c.length_to_root * scale

    # internal nodes (postorder): y is the midpoint of first/last child
    for n in node.postiter():
        c = n2coords[n]
        if (not n.isleaf) and n.children:
            children = n.children
            ymax = n2coords[children[0]].y
            ymin = n2coords[children[-1]].y
            c.y = (ymax + ymin)/2.0
            if not scaled:
                c.x = min([ n2coords[ch].x for ch in children ]) - unitwidth
            else:
                c.x = c.length_to_root * scale

    # iteratively relax x positions for nicer unscaled cladograms
    if (not scaled) and smooth:
        for i in range(10):
            smooth_xpos(node, n2coords)

    # shift everything by the padding offsets
    for coords in n2coords.values():
        coords.x += lpad
        coords.y += tpad

    # record each node's parent coordinates for edge drawing
    for n, coords in n2coords.items():
        if n.parent:
            p = n2coords[n.parent]
            coords.px = p.x; coords.py = p.y
        else:
            coords.px = None; coords.py = None

    return n2coords
-CZ\n \"\"\"\n\n if n2coords is None:\n n2coords = {}\n\n depth_length_preorder_traversal(node, n2coords, True)\n leaves = node.leaves()\n nleaves = len(leaves)\n\n # leafspace is a vector that should sum to nleaves\n if leafspace is None:\n try: leafspace = [ float(x.leafspace) for x in leaves ]\n except: leafspace = numpy.zeros((nleaves,))\n assert len(leafspace) == nleaves\n #leafspace = array(leafspace)/(sum(leafspace)/float(nleaves))\n\n maxdepth = max([ n2coords[lf].depth for lf in leaves ])\n depth = maxdepth * xscale\n #if not yunit: yunit = 1.0/nleaves\n yunit = 1.0\n\n if scaled:\n maxlen = max([ n2coords[lf].length_to_root for lf in leaves ])\n depth = maxlen\n\n y = 0\n for i, lf in enumerate(leaves):\n c = n2coords[lf]\n yoff = 1 + (leafspace[i] * yunit)\n c.y = y + yoff*0.5\n y += yoff\n if not scaled:\n c.x = depth\n else:\n c.x = c.length_to_root\n\n for n in node.postiter():\n c = n2coords[n]\n if not n.isleaf:\n children = n.children\n v = [n2coords[children[0]].y, n2coords[children[-1]].y]\n v.sort()\n ymin, ymax = v\n c.y = (ymax + ymin)/2.0\n if not scaled:\n c.x = min([ n2coords[ch].x for ch in children ]) - 1.0\n else:\n c.x = c.length_to_root\n\n if not scaled:\n for i in range(smooth):\n smooth_xpos(node, n2coords)\n\n return n2coords\n\nif __name__ == \"__main__\":\n import tree\n node = tree.read(\"(a:3,(b:2,(c:4,d:5):1,(e:3,(f:1,g:1):2):2):2);\")\n for i, n in enumerate(node.iternodes()):\n if not n.isleaf:\n n.label = \"node%s\" % i\n node.label = \"root\"\n n2c = calc_node_positions(node, width=10, height=10, scaled=True)\n\n from pprint import pprint\n pprint(n2c)\n", "ivy/ltt.py": "\"\"\"\nCompute lineages through time\n\"\"\"\nimport numpy\n\n# RR: Should results be set to None and then defined in the function to avoid\n# problems with mutable defaults in functions? 
def traverse(node, t=0, results=None):
    """
    Recursively traverse the tree and collect information about when
    nodes split and how many lineages are added by its splitting.

    Args:
        node: node to start from
        t: cumulative time (branch-length distance) from the start node
        results: accumulator list; a fresh list is created when None.
            (Previously a mutable default ``[]``, so repeated calls
            kept appending to the same list.)

    Returns:
        list: (time, n_children - 1) tuples, one per internal node.
    """
    if results is None:
        results = []
    if node.children:
        results.append((t, len(node.children)-1))
        for child in node.children:
            traverse(child, t+child.length, results)
    return results
def parse(data, ttable=None, treename=None):
    """
    Parse a newick string.

    Args:
        data: Any file-like object that can be coerced into shlex, or
            a string (converted to StringIO)
        ttable (dict): Mapping of node labels in the newick string
            to other values.
        treename: Optional name stamped onto every created node.

    Returns:
        Node: The root node.

    NOTE(review): this is Python-2-only as written
    (``types.StringTypes``, the ``raise X, Y`` form), and
    ``raise 'NewickError', ...`` raises a *string*, which is illegal
    even in Python >= 2.6 -- that branch is a latent TypeError.
    """
    from tree import Node

    if type(data) in types.StringTypes:
        data = StringIO(data)

    start_pos = data.tell()
    tokens = Tokenizer(data)

    # `node` tracks the node currently being built as tokens stream in
    node = None; root = None
    lp=0; rp=0; rooted=1

    previous = None

    ni = 0 # node id counter (preorder) - zero-based indexing
    li = 0 # leaf index counter
    ii = 0 # internal node index counter
    pi = 0 # postorder sequence
    while 1:
        token = tokens.get_token()
        #print token,
        # ';' or EOF terminates the tree description
        if token == ';' or token == tokens.eof:
            assert lp == rp, \
                   "unbalanced parentheses in tree description: (%s, %s)" \
                   % (lp, rp)
            break

        # internal node
        elif token == '(':
            lp = lp+1
            newnode = Node()
            newnode.ni = ni; ni += 1
            newnode.isleaf = False
            newnode.ii = ii; ii += 1
            newnode.treename = treename
            # left/right are nested-set interval bounds maintained
            # incrementally as the tree is built
            if node:
                if node.children: newnode.left = node.children[-1].right+1
                else: newnode.left = node.left+1
                node.add_child(newnode)
            else:
                newnode.left = 1; newnode.right = 2
            newnode.right = newnode.left+1
            node = newnode

        # close an internal node: pop back to its parent
        elif token == ')':
            rp = rp+1
            node = node.parent
            node.pi = pi; pi += 1
            if node.children:
                node.right = node.children[-1].right + 1

        # sibling separator: pop back to the parent
        elif token == ',':
            node = node.parent
            if node.children:
                node.right = node.children[-1].right + 1

        # branch length
        elif token == ':':
            token = tokens.get_token()
            # an embedded [...] comment may precede the number
            if token == '[':
                node.length_comment = tokens.parse_embedded_comment()
                token = tokens.get_token()

            if not (token == ''):
                try: brlen = float(token)
                except ValueError:
                    raise ValueError, ("invalid literal for branch length, "
                                       "'%s'" % token)
            else:
                # NOTE(review): string raise -- see docstring
                raise 'NewickError', \
                      'unexpected end-of-file (expecting branch length)'

            node.length = brlen
        # comment
        elif token == '[':
            node.comment = tokens.parse_embedded_comment()
            if node.comment[0] == '&':
                # metadata
                # NOTE(review): eval() on comment content executes
                # arbitrary expressions from the input file -- unsafe
                # for untrusted newick data
                meta = META.findall(node.comment[1:])
                if meta:
                    for k, v in meta:
                        v = eval(v.replace('{','(').replace('}',')'))
                        node.meta[k] = v

        # leaf node or internal node label
        else:
            if previous != ')': # leaf node
                # translate the label through ttable, trying the
                # integer key first (common in nexus translate tables)
                if ttable:
                    try:
                        ttoken = (ttable.get(int(token)) or
                                  ttable.get(token))
                    except ValueError:
                        ttoken = ttable.get(token)
                    if ttoken:
                        token = ttoken
                newnode = Node()
                newnode.ni = ni; ni += 1
                newnode.pi = pi; pi += 1
                newnode.label = "_".join(token.split()).replace("'", "")
                newnode.isleaf = True
                newnode.li = li; li += 1
                if node.children: newnode.left = node.children[-1].right+1
                else: newnode.left = node.left+1
                newnode.right = newnode.left+1
                newnode.treename = treename
                node.add_child(newnode)
                node = newnode
            else: # label
                if ttable:
                    node.label = ttable.get(token, token)
                else:
                    node.label = token

        previous = token
    node.isroot = True
    return node
def parse_ampersand_comment(s):
    """Parse key=value metadata pairs from a newick '&'-style comment.

    Values may be bare words, quoted strings, or ``{min,max}`` ranges.
    Scalar values parseable as floats are converted; range values are
    converted element-wise.

    Returns:
        list: (key, value) tuples in order of appearance.

    NOTE(review): ``string.letters`` exists only in Python 2
    (``string.ascii_letters`` in Python 3), and in Python 3 ``map``
    returns a lazy iterator so the ValueError from a bad float would
    not be raised here -- this function is Python-2 specific as
    written.
    """
    import pyparsing
    pyparsing.ParserElement.enablePackrat()
    from pyparsing import Word, Literal, QuotedString, CaselessKeyword, \
         OneOrMore, Group, Optional, Suppress, Regex, Dict
    word = Word(string.letters+string.digits+"%_")
    key = word.setResultsName("key") + Suppress("=")
    single_value = (Word(string.letters+string.digits+"-.") |
                    QuotedString("'") |
                    QuotedString('"'))
    range_value = Group(Suppress("{") +
                        single_value.setResultsName("min") +
                        Suppress(",") +
                        single_value.setResultsName("max") +
                        Suppress("}"))
    pair = (key + (single_value | range_value).setResultsName("value"))
    g = OneOrMore(pair)
    d = []
    for x in g.searchString(s):
        v = x.value
        if type(v) == str:
            # scalar: convert to float when possible
            try: v = float(v)
            except ValueError: pass
        else:
            # range: convert both endpoints
            try: v = map(float, v.asList())
            except ValueError: pass
        d.append((x.key, v))
    return d
import Word, Literal, QuotedString, CaselessKeyword, \\\n OneOrMore, Group, Optional, Suppress, Regex, Dict\n ## beginblock = Suppress(CaselessKeyword(\"begin\") +\n ## CaselessKeyword(\"trees\") + \";\")\n ## endblock = Suppress((CaselessKeyword(\"end\") |\n ## CaselessKeyword(\"endblock\")) + \";\")\n comment = Optional(Suppress(\"[&\") + Regex(r'[^]]+') + Suppress(\"]\"))\n ## translate = CaselessKeyword(\"translate\").suppress()\n name = Word(string.letters+string.digits+\"_.\") | QuotedString(\"'\")\n ## ttrec = Group(Word(string.digits).setResultsName(\"number\") +\n ## name.setResultsName(\"name\") +\n ## Optional(\",\").suppress())\n ## ttable = Group(translate + OneOrMore(ttrec) + Suppress(\";\"))\n newick = Regex(r'[^;]+;')\n tree = (CaselessKeyword(\"tree\").suppress() +\n Optional(\"*\").suppress() +\n name.setResultsName(\"tree_name\") +\n comment.setResultsName(\"tree_comment\") +\n Suppress(\"=\") +\n comment.setResultsName(\"root_comment\") +\n newick.setResultsName(\"newick\"))\n ## treesblock = Group(beginblock +\n ## Optional(ttable.setResultsName(\"ttable\")) +\n ## Group(OneOrMore(tree)) +\n ## endblock)\n\n def not_begin(s): return s.strip().lower() != \"begin trees;\"\n def not_end(s): return s.strip().lower() not in (\"end;\", \"endblock;\")\n def parse_ttable(f):\n ttable = {}\n while True:\n s = f.next().strip()\n if not s: continue\n if s.lower() == \";\": break\n if s[-1] == \",\": s = s[:-1]\n k, v = s.split()\n ttable[k] = v\n if s[-1] == \";\": break\n return ttable\n\n # read lines between \"begin trees;\" and \"end;\"\n f = itertools.takewhile(not_end, itertools.dropwhile(not_begin, infile))\n s = f.next().strip().lower()\n if s != \"begin trees;\":\n print sys.stderr, \"Expecting 'begin trees;', got %s\" % s\n raise StopIteration\n ttable = {}\n while True:\n try: s = f.next().strip()\n except StopIteration: break\n if not s: continue\n if s.lower() == \"translate\":\n ttable = parse_ttable(f)\n print \"ttable: %s\" % len(ttable)\n 
elif s.split()[0].lower()=='tree':\n match = tree.parseString(s)\n yield nexus.Newick(match, ttable)\n\n## def test():\n## with open(\"/home/rree/Dropbox/pedic-comm-amnat/phylo/beast-results/\"\n## \"simple_stigma.trees.log\") as f:\n## for rec in nexus_iter(f):\n## r = parse(rec.newick, ttable=rec.ttable)\n## for x in r: print x, x.comments\n\ndef test_parse_comment():\n v = ((\"height_median=1.1368683772161603E-13,height=9.188229043880098E-14,\"\n \"height_95%_HPD={5.6843418860808015E-14,1.7053025658242404E-13},\"\n \"height_range={0.0,2.8421709430404007E-13}\"),\n \"R\", \"lnP=-154.27154502342688,lnP=-24657.14341301901\",\n 'states=\"T-lateral\"')\n for s in v:\n print \"input:\", s\n print dict(parse_ampersand_comment(s))\n", "ivy/nexus.py": "import itertools\nfrom collections import defaultdict\nimport newick\n\nclass Newick(object):\n \"\"\"\n convenience class for storing the results of a newick tree\n record from a nexus file, as parsed by newick.nexus_iter\n \"\"\"\n def __init__(self, parse_results=None, ttable={}):\n self.name = \"\"\n self.comment = \"\"\n self.root_comment = \"\"\n self.newick = \"\"\n self.ttable = ttable\n if parse_results: self.populate(parse_results)\n\n def populate(self, parse_results, ttable={}):\n self.name = parse_results.tree_name\n self.comment = parse_results.tree_comment\n self.root_comment = parse_results.root_comment\n self.newick = parse_results.newick\n if ttable: self.ttable = ttable\n\n def parse(self, newick=newick):\n assert self.newick\n self.root = newick.parse(\n self.newick, ttable=self.ttable, treename=self.name\n )\n return self.root\n\ndef fetchaln(fname):\n \"\"\"Fetch alignment\"\"\"\n from Bio.Nexus import Nexus\n n = Nexus.Nexus(fname)\n return n\n\ndef split_blocks(infile):\n from cStringIO import StringIO\n dropwhile = itertools.dropwhile; takewhile = itertools.takewhile\n blocks = []\n not_begin = lambda s: not s.lower().startswith(\"begin\")\n not_end = lambda s: not s.strip().lower() in (\"end;\", 
\"endblock;\")\n while 1:\n f = takewhile(not_end, dropwhile(not_begin, infile))\n try:\n b = f.next().split()[-1][:-1]\n blocks.append((b, StringIO(\"\".join(list(f)))))\n except StopIteration:\n break\n return blocks\n\ndef parse_treesblock(infile):\n import string\n from pyparsing import Optional, Word, Regex, CaselessKeyword, Suppress\n from pyparsing import QuotedString\n comment = Optional(Suppress(\"[&\") + Regex(r'[^]]+') + Suppress(\"]\"))\n name = Word(string.letters+string.digits+\"_\") | QuotedString(\"'\")\n newick = Regex(r'[^;]+;')\n tree = (CaselessKeyword(\"tree\").suppress() +\n Optional(\"*\").suppress() +\n name.setResultsName(\"tree_name\") +\n comment.setResultsName(\"tree_comment\") +\n Suppress(\"=\") +\n comment.setResultsName(\"root_comment\") +\n newick.setResultsName(\"newick\"))\n ## treesblock = Group(beginblock +\n ## Optional(ttable.setResultsName(\"ttable\")) +\n ## Group(OneOrMore(tree)) +\n ## endblock)\n\n def parse_ttable(f):\n ttable = {}\n while True:\n s = f.next().strip()\n if s.lower() == \";\": break\n if s[-1] in \",;\": s = s[:-1]\n k, v = s.split()\n ttable[k] = v\n if s[-1] == \";\": break\n return ttable\n\n ttable = {}\n while True:\n try: s = infile.next().strip()\n except StopIteration: break\n if s.lower() == \"translate\":\n ttable = parse_ttable(infile)\n print \"ttable: %s\" % len(ttable)\n else:\n match = tree.parseString(s)\n yield Newick(match, ttable)\n", "ivy/sequtil.py": "from itertools import izip, imap\nimport numpy\n\ndef finditer(seq, substr, start=0):\n \"\"\"\n Find substrings within a sequence\n\n Args:\n seq (str): A sequence.\n substr (str): A subsequence to search for\n start (int): Starting index. 
Defaults to 0\n Yields:\n int: Starting indicies of where the substr was found in seq\n \"\"\"\n N = len(substr)\n i = seq.find(substr, start)\n while i >= 0:\n yield i\n i = seq.find(substr, i+N)\n\ndef gapidx(seq, gapchar='-'):\n \"\"\"\n For a sequence with gaps, calculate site positions without gaps\n\n Args:\n seq (list): Each element of the list is one character in a sequence.\n gapchar (str): The character gaps are coded as. Defaults to '-'\n Returns:\n array: An array where the first element corresponds to range(number of\n characters that are not gaps) and the second element is the indicies\n of all characters that are not gaps.\n \"\"\"\n a = numpy.array(seq)\n idx = numpy.arange(len(a))\n nongap = idx[a != gapchar]\n return numpy.array((numpy.arange(len(nongap)), nongap))\n\ndef find_stop_codons(seq, pos=0):\n \"\"\"\n Find stop codons within sequence (in reading frame)\n\n Args:\n seq (str): A sequence\n pos (int): Starting position. Defaults to 0.\n Yields:\n tuple: The index where the stop codon starts\n and which stop codon was found.\n \"\"\"\n s = seq[pos:]\n it = iter(s)\n g = imap(lambda x:\"\".join(x), izip(it, it, it))\n for i, x in enumerate(g):\n if x in (\"TAG\", \"TAA\", \"TGA\"):\n yield pos+(i*3), x\n", "ivy/storage.py": "from operator import itemgetter\nfrom heapq import nlargest\nfrom itertools import repeat, ifilter\n\nclass Storage(dict):\n \"\"\"\n A Storage object is like a dictionary except `obj.foo` can be used\n in addition to `obj['foo']`.\n\n From web2py/gluon/storage.py by Massimo Di Pierro (www.web2py.com)\n \"\"\"\n def __getattr__(self, key):\n try:\n return self[key]\n except KeyError:\n return None\n\n def __setattr__(self, key, value):\n self[key] = value\n\n def __delattr__(self, key):\n try:\n del self[key]\n except KeyError, k:\n raise AttributeError, k\n\n def __repr__(self):\n return ''\n\n def __getstate__(self):\n return dict(self)\n\n def __setstate__(self, value):\n for (k, v) in value.items():\n self[k] = 
v\n\nclass MaxDict(dict):\n def __setitem__(self, key, value):\n v = self.get(key)\n if value > v:\n dict.__setitem__(self, key, value)\n \n#from http://code.activestate.com/recipes/576611/\nclass Counter(dict):\n \"\"\"Dict subclass for counting hashable objects. Sometimes called a bag\n or multiset. Elements are stored as dictionary keys and their counts\n are stored as dictionary values.\n\n >>> Counter('zyzygy')\n Counter({'y': 3, 'z': 2, 'g': 1})\n\n \"\"\"\n\n def __init__(self, iterable=None, **kwds):\n \"\"\"Create a new, empty Counter object. And if given, count elements\n from an input iterable. Or, initialize the count from another mapping\n of elements to their counts.\n\n >>> c = Counter() # a new, empty counter\n >>> c = Counter('gallahad') # a new counter from an iterable\n >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping\n >>> c = Counter(a=4, b=2) # a new counter from keyword args\n\n \"\"\" \n self.update(iterable, **kwds)\n\n def __missing__(self, key):\n return 0\n\n def most_common(self, n=None):\n \"\"\"List the n most common elements and their counts from the most\n common to the least. If n is None, then list all element counts.\n\n >>> Counter('abracadabra').most_common(3)\n [('a', 5), ('r', 2), ('b', 2)]\n\n \"\"\" \n if n is None:\n return sorted(self.iteritems(), key=itemgetter(1), reverse=True)\n return nlargest(n, self.iteritems(), key=itemgetter(1))\n\n def elements(self):\n \"\"\"Iterator over elements repeating each as many times as its count.\n\n >>> c = Counter('ABCABC')\n >>> sorted(c.elements())\n ['A', 'A', 'B', 'B', 'C', 'C']\n\n If an element's count has been set to zero or is a negative number,\n elements() will ignore it.\n\n \"\"\"\n for elem, count in self.iteritems():\n for _ in repeat(None, count):\n yield elem\n\n # Override dict methods where the meaning changes for Counter objects.\n\n @classmethod\n def fromkeys(cls, iterable, v=None):\n raise NotImplementedError(\n 'Counter.fromkeys() is undefined. 
Use Counter(iterable) instead.')\n\n def update(self, iterable=None, **kwds):\n \"\"\"Like dict.update() but add counts instead of replacing them.\n\n Source can be an iterable, a dictionary, or another Counter instance.\n\n >>> c = Counter('which')\n >>> c.update('witch') # add elements from another iterable\n >>> d = Counter('watch')\n >>> c.update(d) # add elements from another counter\n >>> c['h'] # four 'h' in which, witch, and watch\n 4\n\n \"\"\" \n if iterable is not None:\n if hasattr(iterable, 'iteritems'):\n if self:\n self_get = self.get\n for elem, count in iterable.iteritems():\n self[elem] = self_get(elem, 0) + count\n else:\n dict.update(self, iterable) # fast path when counter is empty\n else:\n self_get = self.get\n for elem in iterable:\n self[elem] = self_get(elem, 0) + 1\n if kwds:\n self.update(kwds)\n\n def copy(self):\n 'Like dict.copy() but returns a Counter instance instead of a dict.'\n return Counter(self)\n\n def __delitem__(self, elem):\n 'Like dict.__delitem__() but does not raise KeyError for missing values.'\n if elem in self:\n dict.__delitem__(self, elem)\n\n def __repr__(self):\n if not self:\n return '%s()' % self.__class__.__name__\n items = ', '.join(map('%r: %r'.__mod__, self.most_common()))\n return '%s({%s})' % (self.__class__.__name__, items)\n\n # Multiset-style mathematical operations discussed in:\n # Knuth TAOCP Volume II section 4.6.3 exercise 19\n # and at http://en.wikipedia.org/wiki/Multiset\n #\n # Outputs guaranteed to only include positive counts.\n #\n # To strip negative and zero counts, add-in an empty counter:\n # c += Counter()\n\n def __add__(self, other):\n \"\"\"Add counts from two counters.\n\n >>> Counter('abbb') + Counter('bcc')\n Counter({'b': 4, 'c': 2, 'a': 1})\n\n\n \"\"\"\n if not isinstance(other, Counter):\n return NotImplemented\n result = Counter()\n for elem in set(self) | set(other):\n newcount = self[elem] + other[elem]\n if newcount > 0:\n result[elem] = newcount\n return result\n\n def 
__sub__(self, other):\n \"\"\" Subtract count, but keep only results with positive counts.\n\n >>> Counter('abbbc') - Counter('bccd')\n Counter({'b': 2, 'a': 1})\n\n \"\"\"\n if not isinstance(other, Counter):\n return NotImplemented\n result = Counter()\n for elem in set(self) | set(other):\n newcount = self[elem] - other[elem]\n if newcount > 0:\n result[elem] = newcount\n return result\n\n def __or__(self, other):\n \"\"\"Union is the maximum of value in either of the input counters.\n\n >>> Counter('abbb') | Counter('bcc')\n Counter({'b': 3, 'c': 2, 'a': 1})\n\n \"\"\"\n if not isinstance(other, Counter):\n return NotImplemented\n _max = max\n result = Counter()\n for elem in set(self) | set(other):\n newcount = _max(self[elem], other[elem])\n if newcount > 0:\n result[elem] = newcount\n return result\n\n def __and__(self, other):\n \"\"\" Intersection is the minimum of corresponding counts.\n\n >>> Counter('abbb') & Counter('bcc')\n Counter({'b': 1})\n\n \"\"\"\n if not isinstance(other, Counter):\n return NotImplemented\n _min = min\n result = Counter()\n if len(self) < len(other):\n self, other = other, self\n for elem in ifilter(self.__contains__, other):\n newcount = _min(self[elem], other[elem])\n if newcount > 0:\n result[elem] = newcount\n return result\n\ndef convert(d):\n \"convert a (potentially nested) dict to Storage\"\n from types import DictType\n t = type(d)\n if t == DictType:\n for k,v in d.items():\n d[k] = convert(v)\n return Storage(d)\n return d\n\nif __name__ == '__main__':\n import doctest\n print doctest.testmod()\n", "ivy/tree.py": "\"\"\"\nThe Node class and functions for creating trees from Newick strings,\netc.\n\nivy does not have a Tree class per se, as most functions operate\ndirectly on Node objects.\n\"\"\"\nimport os, types\nfrom storage import Storage\nfrom copy import copy as _copy\nfrom matrix import vcv\nimport newick\nfrom itertools import izip_longest\n\n## class Tree(object):\n## \"\"\"\n## A simple Tree class.\n## 
\"\"\"\n## def __init__(self, data=None, format=\"newick\", name=None, ttable=None):\n## self.root = None\n## if data:\n## self.root = read(data, format, name, ttable)\n## self.name = name\n## self.ttable = ttable\n\n## def __getattribute__(self, a):\n## r = object.__getattribute__(self, 'root')\n## try:\n## return object.__getattribute__(r, a)\n## except AttributeError:\n## return object.__getattribute__(self, a)\n\ndef traverse(node):\n \"recursive preorder iterator based solely on .children attribute\"\n yield node\n for child in node.children:\n for descendant in traverse(child):\n yield descendant\n\nclass Node(object):\n \"\"\"\n A basic Node class with attributes and references to child nodes\n ('children', a list) and 'parent'.\n\n Keyword Args:\n id: ID of the node. If not provided, is set using\n builtin id function\n ni (int): Node index.\n li (int): Leaf index.\n isroot (bool): Is the node a root.\n isleaf (bool): Is the node a leaf.\n label (str): Node label.\n length (float): Branch length from node to parent\n support: RR: Are these bootstrap support values? -CZ\n age (float): Age of the node in time units.\n parent (Node): Parent of the ndoe.\n children (list): List of node objects. Children of node\n nchildren (int): No. 
of children\n left: RR: Unsure what left and right mean -CZ\n treename: Name of tree\n comment: Comments for tree\n\n \"\"\"\n def __init__(self, **kwargs):\n self.id = None\n self.ni = None # node index\n self.li = None # leaf index\n self.isroot = False\n self.isleaf = False\n self.label = None\n self.length = None\n self.support = None\n self.age = None\n self.parent = None\n self.children = []\n self.nchildren = 0\n self.left = None\n self.right = None\n self.treename = \"\"\n self.comment = \"\"\n self.meta = {}\n ## self.length_comment = \"\"\n ## self.label_comment = \"\"\n if kwargs:\n for k, v in kwargs.items():\n setattr(self, k, v)\n if self.id is None: self.id = id(self)\n\n def __copy__(self):\n return self.copy()\n\n def __repr__(self):\n v = []\n if self.isroot:\n v.append(\"root\")\n elif self.isleaf:\n v.append(\"leaf\")\n\n if self.label:\n v.append(\"'%s'\" % self.label)\n\n s = \", \".join(v)\n\n nid = ((self.id if (self.id is not None) else self.ni) or\n '<%s>' % id(self))\n if s:\n s = \"Node(%s, %s)\" % (nid, s)\n else:\n s = \"Node(%s)\" % nid\n return s\n\n\n def __contains__(self, other):\n \"\"\"\n For use with `in` keyword\n\n Args:\n other: Another node or node label.\n Returns:\n bool: Whether or not the other node is a descendant of self\n \"\"\"\n otype = type(other)\n if other and otype in types.StringTypes:\n for x in self:\n if other == x.label:\n return True\n return False\n else:\n assert otype == type(self)\n for x in self.iternodes():\n if other == x:\n return True\n return False\n\n def __iter__(self):\n for node in self.iternodes():\n yield node\n\n def __len__(self):\n \"\"\"\n Number of nodes descended from self\n\n Returns:\n int: Number of nodes descended from self (including self)\n \"\"\"\n i = 0\n for n in self:\n i += 1\n return i\n\n def __nonzero__(self):\n return True\n\n def __getitem__(self, x):\n \"\"\"\n Args:\n x: A Node, Node.id (int) or a Node.label (string)\n\n Returns:\n Node: Found node(s)\n\n \"\"\"\n 
for n in self:\n if n==x or n.id==x or n.ni == x or (n.label and n.label==x):\n return n\n raise IndexError(str(x))\n\n def ascii(self, *args, **kwargs):\n \"\"\"\n Create ascii tree.\n\n Keyword Args:\n unitlen (float): How long each unit should be rendered as.\n Defaults to 3.\n minwidth (float): Minimum width of the plot. Defaults to 50\n maxwidth (float): Maximum width of the plot. Defaults to None\n scaled (bool): Whether or not the tree is scaled. Defaults to False\n show_internal_labels (bool): Whether or not to show labels\n on internal nodes. Defaults to True.\n Returns:\n str: Ascii tree to be shown with print().\n \"\"\"\n from ascii import render\n return render(self, *args, **kwargs)\n\n def collapse(self, add=False):\n \"\"\"\n Remove self and collapse children to polytomy\n\n Args:\n add (bool): Whether or not to add self's length to children's\n length.\n\n Returns:\n Node: Parent of self\n\n \"\"\"\n assert self.parent\n p = self.prune()\n for c in self.children:\n p.add_child(c)\n if add and (c.length is not None):\n c.length += self.length\n self.children = []\n return p\n\n def copy(self, recurse=False):\n \"\"\"\n Return a copy of the node, but not copies of children, parent,\n or any attribute that is a Node.\n\n If `recurse` is True, recursively copy child nodes.\n\n Args:\n recurse (bool): Whether or not to copy children as well as self.\n\n Returns:\n Node: A copy of self.\n\n TODO: test this function.\n\n RR: This function runs rather slowly -CZ\n \"\"\"\n newnode = Node()\n for attr, value in self.__dict__.items():\n if (attr not in (\"children\", \"parent\") and\n not isinstance(value, Node)):\n setattr(newnode, attr, _copy(value))\n if recurse:\n newnode.children = [\n child.copy(True) for child in self.children\n ]\n return newnode\n\n def leafsets(self, d=None, labels=False):\n \"\"\"return a mapping of nodes to leaf sets (nodes or labels)\"\"\"\n d = d or {}\n if not self.isleaf:\n s = set()\n for child in self.children:\n if 
child.isleaf:\n if labels:\n s.add(child.label)\n else:\n s.add(child)\n else:\n d = child.leafsets(d, labels)\n s = s | d[child]\n d[self] = frozenset(s)\n return d\n\n def mrca(self, *nodes):\n \"\"\"\n Find most recent common ancestor of *nodes*\n\n Args:\n *nodes (Node): Node objects\n Returns:\n Node: The MRCA of *nodes*\n \"\"\"\n if len(nodes) == 1:\n nodes = tuple(nodes[0])\n if len(nodes) == 1:\n return nodes[0]\n nodes = set([ self[n] for n in nodes ])\n anc = []\n def f(n):\n seen = set()\n for c in n.children: seen.update(f(c))\n if n in nodes: seen.add(n)\n if seen == nodes and (not anc): anc.append(n)\n return seen\n f(self)\n return anc[0]\n\n ## def mrca(self, *nodes):\n ## \"\"\"\n ## Find most recent common ancestor of *nodes*\n ## \"\"\"\n ## if len(nodes) == 1:\n ## nodes = tuple(nodes[0])\n ## if len(nodes) == 1:\n ## return nodes[0]\n ## ## assert len(nodes) > 1, (\n ## ## \"Need more than one node for mrca(), got %s\" % nodes\n ## ## )\n ## def f(x):\n ## if isinstance(x, Node):\n ## return x\n ## elif type(x) in types.StringTypes:\n ## return self.find(x)\n ## nodes = map(f, nodes)\n ## assert all(filter(lambda x: isinstance(x, Node), nodes))\n\n ## #v = [ list(n.rootpath()) for n in nodes if n in self ]\n ## v = [ list(x) for x in izip_longest(*[ reversed(list(n.rootpath()))\n ## for n in nodes if n in self ]) ]\n ## if len(v) == 1:\n ## return v[0][0]\n ## anc = None\n ## for x in v:\n ## s = set(x)\n ## if len(s) == 1: anc = list(s)[0]\n ## else: break\n ## return anc\n\n def ismono(self, *leaves):\n \"\"\"\n Test if leaf descendants are monophyletic\n\n Args:\n *leaves (Node): At least two leaf Node objects\n\n Returns:\n bool: Are the leaf descendants monophyletic?\n\n RR: Should this function have a check to make sure the input nodes are\n leaves? 
There is some strange behavior if you input internal nodes -CZ\n \"\"\"\n if len(leaves) == 1:\n leaves = list(leaves)[0]\n assert len(leaves) > 1, (\n \"Need more than one leaf for ismono(), got %s\" % leaves\n )\n anc = self.mrca(leaves)\n if anc:\n return bool(len(anc.leaves())==len(leaves))\n\n def order_subtrees_by_size(self, n2s=None, recurse=False, reverse=False):\n \"\"\"\n Order interal clades by size\n\n \"\"\"\n if n2s is None:\n n2s = clade_sizes(self)\n if not self.isleaf:\n v = [ (n2s[c], c.label, c) for c in self.children ]\n v.sort()\n if reverse:\n v.reverse()\n self.children = [ x[-1] for x in v ]\n if recurse:\n for c in self.children:\n c.order_subtrees_by_size(n2s, recurse=True, reverse=reverse)\n\n def ladderize(self, reverse=False):\n self.order_subtrees_by_size(recurse=True, reverse=reverse)\n return self\n\n def add_child(self, child):\n \"\"\"\n Add child as child of self\n\n Args:\n child (Node): A node object\n\n \"\"\"\n assert child not in self.children\n self.children.append(child)\n child.parent = self\n child.isroot = False\n self.nchildren += 1\n\n def bisect_branch(self):\n \"\"\"\n Add new node as parent to self in the middle of branch to parent.\n\n Returns:\n Node: A new node.\n\n \"\"\"\n assert self.parent\n parent = self.prune()\n n = Node()\n if self.length:\n n.length = self.length/2.0\n self.length /= 2.0\n parent.add_child(n)\n n.add_child(self)\n return n\n\n def remove_child(self, child):\n \"\"\"\n Remove child.\n\n Args:\n child (Node): A node object that is a child of self\n\n \"\"\"\n assert child in self.children\n self.children.remove(child)\n child.parent = None\n self.nchildren -= 1\n if not self.children:\n self.isleaf = True\n\n def labeled(self):\n \"\"\"\n Return a list of all descendant nodes that are labeled\n\n Returns:\n list: All descendants of self that are labeled (including self)\n \"\"\"\n return [ n for n in self if n.label ]\n\n def leaves(self, f=None):\n \"\"\"\n Return a list of leaves. 
Can be filtered with f.\n\n Args:\n f (function): A function that evaluates to True if called with desired\n node as the first input\n\n Returns:\n list: A list of leaves that are true for f (if f is given)\n\n \"\"\"\n if f: return [ n for n in self if (n.isleaf and f(n)) ]\n return [ n for n in self if n.isleaf ]\n\n def internals(self, f=None):\n \"\"\"\n Return a list nodes that have children (internal nodes)\n\n Args:\n f (function): A function that evaluates to true if called with desired\n node as the first input\n\n Returns:\n list: A list of internal nodes that are true for f (if f is given)\n\n \"\"\"\n if f: return [ n for n in self if (n.children and f(n)) ]\n return [ n for n in self if n.children ]\n\n def clades(self):\n \"\"\"\n Get internal nodes descended from self\n\n Returns:\n list: A list of internal nodes descended from (and not including) self.\n\n \"\"\"\n return [ n for n in self if (n is not self) and not n.isleaf ]\n\n def iternodes(self, f=None):\n \"\"\"\n Return a generator of nodes descendant from self - including self\n\n Args:\n f (function): A function that evaluates to true if called with\n desired node as the first input\n\n Yields:\n Node: Nodes descended from self (including self) in\n preorder sequence\n\n \"\"\"\n if (f and f(self)) or (not f):\n yield self\n for child in self.children:\n for n in child.iternodes(f):\n yield n\n\n def iterleaves(self):\n \"\"\"\n Yield leaves descendant from self\n \"\"\"\n return self.iternodes(lambda x:x.isleaf)\n\n def preiter(self, f=None):\n \"\"\"\n Yield nodes in preorder sequence\n \"\"\"\n for n in self.iternodes(f=f):\n yield n\n\n def postiter(self, f=None):\n \"\"\"\n Yield nodes in postorder sequence\n \"\"\"\n if not self.isleaf:\n for child in self.children:\n for n in child.postiter():\n if (f and f(n)) or (not f):\n yield n\n if (f and f(self)) or (not f):\n yield self\n\n def descendants(self, order=\"pre\", v=None, f=None):\n \"\"\"\n Return a list of nodes descendant from 
self - but _not_\n including self!\n\n Args:\n order (str): Indicates wether to return nodes in preorder or\n postorder sequence. Optional, defaults to \"pre\"\n f (function): filtering function that evaluates to True if desired\n node is called as the first parameter.\n\n Returns:\n list: A list of nodes descended from self not including self.\n\n \"\"\"\n v = v or []\n for child in self.children:\n if (f and f(child)) or (not f):\n if order == \"pre\":\n v.append(child)\n else:\n v.insert(0, child)\n if child.children:\n child.descendants(order, v, f)\n return v\n\n def get(self, f, *args, **kwargs):\n \"\"\"\n Return the first node found by node.find()\n\n Args:\n f (function): A function that evaluates to True if desired\n node is called as the first parameter.\n Returns:\n Node: The first node found by node.find()\n\n \"\"\"\n v = self.find(f, *args, **kwargs)\n try:\n return v.next()\n except StopIteration:\n return None\n\n def grep(self, s, ignorecase=True):\n \"\"\"\n Find nodes by regular-expression search of labels\n\n Args:\n s (str): String to search.\n ignorecase (bool): Indicates to ignore case. Defaults to true.\n\n Returns:\n lsit: A list of node objects whose labels were matched by s.\n\n \"\"\"\n import re\n if ignorecase:\n pattern = re.compile(s, re.IGNORECASE)\n else:\n pattern = re.compile(s)\n\n search = pattern.search\n return [ x for x in self if x.label and search(x.label) ]\n\n def lgrep(self, s, ignorecase=True):\n \"\"\"\n Find leaves by regular-expression search of labels\n\n Args:\n s (str): String to search.\n ignorecase (bool): Indicates to ignore case. 
Defaults to true.\n\n Returns:\n lsit: A list of node objects whose labels were matched by s.\n\n \"\"\"\n return [ x for x in self.grep(s, ignorecase=ignorecase) if x.isleaf ]\n\n def bgrep(self, s, ignorecase=True):\n \"\"\"\n Find branches (internal nodes) by regular-expression search of\n labels\n\n Args:\n s (str): String to search.\n ignorecase (bool): Indicates to ignore case. Defaults to true.\n\n Returns:\n lsit: A list of node objects whose labels were matched by s.\n\n \"\"\"\n return [ x for x in self.grep(s, ignorecase=ignorecase) if\n (not x.isleaf) ]\n\n def find(self, f, *args, **kwargs):\n \"\"\"\n Find descendant nodes.\n\n Args:\n f: Function or a string. If a string, it is converted to a\n function for finding *f* as a substring in node labels.\n Otherwise, *f* should evaluate to True if called with a desired\n node as the first parameter.\n\n Yields:\n Node: Found nodes in preorder sequence.\n\n \"\"\"\n if not f: return\n if type(f) in types.StringTypes:\n func = lambda x: (f or None) in (x.label or \"\")\n else:\n func = f\n for n in self.iternodes():\n if func(n, *args, **kwargs):\n yield n\n\n def findall(self, f, *args, **kwargs):\n \"\"\"Return a list of found nodes.\"\"\"\n return list(self.find(f, *args, **kwargs))\n\n def prune(self):\n \"\"\"\n Remove self if self is not root.\n\n Returns:\n Node: Parent of self. 
If parent had only two children,\n parent is now a 'knee' and can be removed with excise.\n\n \"\"\"\n p = self.parent\n if p:\n p.remove_child(self)\n return p\n\n def excise(self):\n \"\"\"\n For 'knees': remove self from between parent and single child\n \"\"\"\n assert self.parent\n assert len(self.children)==1\n p = self.parent\n c = self.children[0]\n if c.length is not None and self.length is not None:\n c.length += self.length\n c.prune()\n self.prune()\n p.add_child(c)\n return p\n\n def graft(self, node):\n \"\"\"\n Add node as sister to self.\n \"\"\"\n parent = self.parent\n parent.remove_child(self)\n n = Node()\n n.add_child(self)\n n.add_child(node)\n parent.add_child(n)\n\n ## def leaf_distances(self, store=None, measure=\"length\"):\n ## \"\"\"\n ## for each internal node, calculate the distance to each leaf,\n ## measured in branch length or internodes\n ## \"\"\"\n ## if store is None:\n ## store = {}\n ## leaf2len = {}\n ## if self.children:\n ## for child in self.children:\n ## if measure == \"length\":\n ## dist = child.length\n ## elif measure == \"nodes\":\n ## dist = 1\n ## child.leaf_distances(store, measure)\n ## if child.isleaf:\n ## leaf2len[child] = dist\n ## else:\n ## for k, v in store[child].items():\n ## leaf2len[k] = v + dist\n ## else:\n ## leaf2len[self] = {self: 0}\n ## store[self] = leaf2len\n ## return store\n\n def leaf_distances(self, measure=\"length\"):\n \"\"\"\n RR: I don't quite understand the structure of the output. 
Also,\n I can't figure out what \"measure\" does.-CZ\n \"\"\"\n from collections import defaultdict\n store = defaultdict(lambda:defaultdict(lambda:0))\n nodes = [ x for x in self if x.children ]\n for lf in self.leaves():\n x = lf.length\n for n in lf.rootpath(self):\n store[n][lf] = x\n x += (n.length or 0)\n return store\n\n def rootpath(self, end=None, stop=None):\n \"\"\"\n Iterate over parent nodes toward the root, or node *end* if\n encountered.\n\n Args:\n end (Node): A Node object to iterate to (instead of iterating\n towards root). Optional, defaults to None\n stop (function): A function that returns True if desired node is called\n as the first parameter. Optional, defaults to None\n\n Yields:\n Node: Nodes in path to root (or end).\n\n \"\"\"\n n = self.parent\n while 1:\n if n is None: raise StopIteration\n yield n\n if n.isroot or (end and n == end) or (stop and stop(n)):\n raise StopIteration\n n = n.parent\n\n def rootpath_length(self, end=None):\n \"\"\"\n Get length from self to root(if end is None) or length\n from self to an ancestor node (if end is an ancestor to self)\n\n Args:\n end (Node): A node object\n\n Returns:\n float: The length from self to root/end\n\n \"\"\"\n n = self\n x = 0.0\n while n.parent:\n x += n.length\n if n.parent == end:\n break\n n = n.parent\n return x\n ## f = lambda x:x.parent==end\n ## v = [self.length]+[ x.length for x in self.rootpath(stop=f)\n ## if x.parent ]\n ## assert None not in v\n ## return sum(v)\n\n def max_tippath(self, first=True):\n \"\"\"\n Get the maximum length from self to a leaf node\n \"\"\"\n v = 0\n if self.children:\n v = max([ c.max_tippath(False) for c in self.children ])\n if not first:\n if self.length is None: v += 1\n else: v += self.length\n return v\n\n def subtree_mapping(self, labels, clean=False):\n \"\"\"\n Find the set of nodes in 'labels', and create a new tree\n representing the subtree connecting them. 
Nodes are assumed\n to be non-nested.\n\n Returns:\n dict: a mapping of old nodes to new nodes and vice versa.\n\n TODO: test this, high bug probability\n \"\"\"\n d = {}\n oldtips = [ x for x in self.leaves() if x.label in labels ]\n for tip in oldtips:\n path = list(tip.rootpath())\n for node in path:\n if node not in d:\n newnode = Node()\n newnode.isleaf = node.isleaf\n newnode.length = node.length\n newnode.label = node.label\n d[node] = newnode\n d[newnode] = node\n else:\n newnode = d[node]\n\n for child in node.children:\n if child in d:\n newchild = d[child]\n if newchild not in newnode.children:\n newnode.add_child(newchild)\n d[\"oldroot\"] = self\n d[\"newroot\"] = d[self]\n if clean:\n n = d[\"newroot\"]\n while 1:\n if n.nchildren == 1:\n oldnode = d[n]\n del d[oldnode]; del d[n]\n child = n.children[0]\n child.parent = None\n child.isroot = True\n d[\"newroot\"] = child\n d[\"oldroot\"] = d[child]\n n = child\n else:\n break\n\n for tip in oldtips:\n newnode = d[tip]\n while 1:\n newnode = newnode.parent\n oldnode = d[newnode]\n if newnode.nchildren == 1:\n child = newnode.children[0]\n if newnode.length:\n child.length += newnode.length\n newnode.remove_child(child)\n if newnode.parent:\n parent = newnode.parent\n parent.remove_child(newnode)\n parent.add_child(child)\n del d[oldnode]; del d[newnode]\n if not newnode.parent:\n break\n\n return d\n\n def reroot_orig(self, newroot):\n assert newroot in self\n self.isroot = False\n newroot.isroot = True\n v = []\n n = newroot\n while 1:\n v.append(n)\n if not n.parent: break\n n = n.parent\n v.reverse()\n for i, cp in enumerate(v[:-1]):\n node = v[i+1]\n # node is current node; cp is current parent\n cp.remove_child(node)\n node.add_child(cp)\n cp.length = node.length\n return newroot\n\n def reroot(self, newroot):\n \"\"\"\n RR: I can't get this to work properly -CZ\n \"\"\"\n newroot = self[newroot]\n assert newroot in self\n self.isroot = False\n n = newroot\n v = list(n.rootpath())\n v.reverse()\n 
for node in (v+[n])[1:]:\n # node is current node; cp is current parent\n cp = node.parent\n cp.remove_child(node)\n node.add_child(cp)\n cp.length = node.length\n cp.label = node.label\n newroot.isroot = True\n return newroot\n\n def makeroot(self, shift_labels=False):\n \"\"\"\n shift_labels: flag to shift internal parent-child node labels\n when internode polarity changes; suitable e.g. if internal node\n labels indicate unrooted bipartition support\n \"\"\"\n v = list(self.rootpath())\n v[-1].isroot = False\n v.reverse()\n for node in v[1:] + [self]:\n # node is current node; cp is current parent\n cp = node.parent\n cp.remove_child(node)\n node.add_child(cp)\n cp.length = node.length\n if shift_labels:\n cp.label = node.label\n self.isroot = True\n return self\n\n def write(self, outfile=None, format=\"newick\", length_fmt=\":%g\", end=True,\n clobber=False):\n if format==\"newick\":\n s = write_newick(self, outfile, length_fmt, True, clobber)\n if not outfile:\n return s\n\n\nreroot = Node.reroot\n\ndef index(node, n=0, d=0):\n \"\"\"\n recursively attach 'next', 'back', (and 'left', 'right'), 'ni',\n 'ii', 'pi', and 'node_depth' attributes to nodes\n \"\"\"\n node.next = node.left = n\n if not node.parent:\n node.node_depth = d\n else:\n node.node_depth = node.parent.node_depth + 1\n n += 1\n for i, c in enumerate(node.children):\n if i > 0:\n n = node.children[i-1].back + 1\n index(c, n)\n\n if node.children:\n node.back = node.right = node.children[-1].back + 1\n else:\n node.back = node.right = n\n return node.back\n\ndef remove_singletons(root, add=True):\n \"Remove descendant nodes that are the sole child of their parent\"\n for leaf in root.leaves():\n for n in leaf.rootpath():\n if n.parent and len(n.parent.children)==1:\n n.collapse(add)\n\ndef cls(root):\n \"\"\"\n Get clade sizes of whole tree\n Args:\n * root: A root node\n\n Returns:\n * A dict mapping nodes to clade sizes\n\n \"\"\"\n results = {}\n for node in root.postiter():\n if 
node.isleaf:\n results[node] = 1\n else:\n results[node] = sum(results[child] for child in node.children)\n return results\n\ndef clade_sizes(node, results={}):\n \"\"\"Map node and descendants to number of descendant tips\"\"\"\n size = int(node.isleaf)\n if not node.isleaf:\n for child in node.children:\n clade_sizes(child, results)\n size += results[child]\n results[node] = size\n return results\n\ndef write(node, outfile=None, format=\"newick\", length_fmt=\":%g\",\n clobber=False):\n if format==\"newick\" or ((type(outfile) in types.StringTypes) and\n (outfile.endswith(\".newick\") or\n outfile.endswith(\".new\"))):\n s = write_newick(node, outfile, length_fmt, True, clobber)\n if not outfile:\n return s\n\ndef write_newick(node, outfile=None, length_fmt=\":%g\", end=False,\n clobber=False):\n if not node.isleaf:\n node_str = \"(%s)%s\" % \\\n (\",\".join([ write_newick(child, outfile, length_fmt,\n False, clobber)\n for child in node.children ]),\n (node.label or \"\")\n )\n else:\n node_str = \"%s\" % node.label\n\n if node.length is not None:\n length_str = length_fmt % node.length\n else:\n length_str = \"\"\n\n semicolon = \"\"\n if end:\n semicolon = \";\"\n s = \"%s%s%s\" % (node_str, length_str, semicolon)\n if end and outfile:\n flag = False\n if type(outfile) in types.StringTypes:\n if not clobber:\n assert not os.path.isfile(outfile), \"File '%s' exists! (Set clobber=True to overwrite)\" % outfile\n flag = True\n outfile = file(outfile, \"w\")\n outfile.write(s)\n if flag:\n outfile.close()\n return s\n\ndef read(data, format=None, treename=None, ttable=None):\n \"\"\"\n Read a single tree from *data*, which can be a Newick string, a\n file name, or a file-like object with `tell` and 'read`\n methods. 
*treename* is an optional string that will be attached to\n all created nodes.\n\n Args:\n data: A file or file-like object or newick string\n\n Returns:\n Node: The root node.\n \"\"\"\n import newick\n StringTypes = types.StringTypes\n\n def strip(s):\n fname = os.path.split(s)[-1]\n head, tail = os.path.splitext(fname)\n tail = tail.lower()\n if tail in (\".nwk\", \".tre\", \".tree\", \".newick\", \".nex\"):\n return head\n else:\n return fname\n\n if (not format):\n if (type(data) in StringTypes) and os.path.isfile(data):\n s = data.lower()\n for tail in \".nex\", \".nexus\", \".tre\":\n if s.endswith(tail):\n format=\"nexus\"\n break\n\n if (not format):\n format = \"newick\"\n\n if format == \"newick\":\n if type(data) in StringTypes:\n if os.path.isfile(data):\n treename = strip(data)\n return newick.parse(file(data), treename=treename,\n ttable=ttable)\n else:\n return newick.parse(data, ttable=ttable)\n\n elif (hasattr(data, \"tell\") and hasattr(data, \"read\")):\n treename = strip(getattr(data, \"name\", None))\n return newick.parse(data, treename=treename, ttable=ttable)\n elif format == \"nexus-dendropy\":\n import dendropy\n if type(data) in StringTypes:\n if os.path.isfile(data):\n treename = strip(data)\n return newick.parse(\n str(dendropy.Tree.get_from_path(data, \"nexus\")),\n treename=treename\n )\n else:\n return newick.parse(\n str(dendropy.Tree.get_from_string(data, \"nexus\"))\n )\n\n elif (hasattr(data, \"tell\") and hasattr(data, \"read\")):\n treename = strip(getattr(data, \"name\", None))\n return newick.parse(\n str(dendropy.Tree.get_from_stream(data, \"nexus\")),\n treename=treename\n )\n else:\n pass\n\n elif format == \"nexus\":\n if type(data) in StringTypes:\n if os.path.isfile(data):\n with open(data) as infile:\n rec = newick.nexus_iter(infile).next()\n if rec: return rec.parse()\n else:\n rec = newick.nexus_iter(StringIO(data)).next()\n if rec: return rec.parse()\n else:\n rec = newick.nexus_iter(data).next()\n if rec: return 
rec.parse()\n else:\n # implement other tree formats here (nexus, nexml etc.)\n raise IOError, \"format '%s' not implemented yet\" % format\n\n raise IOError, \"unable to read tree from '%s'\" % data\n\ndef readmany(data, format=\"newick\"):\n \"\"\"Iterate over trees from a source.\"\"\"\n if type(data) in types.StringTypes:\n if os.path.isfile(data):\n data = open(data)\n else:\n data = StringIO(data)\n\n if format == \"newick\":\n for line in data:\n yield newick.parse(line)\n elif format == \"nexus\":\n for rec in newick.nexus_iter(data):\n yield rec.parse()\n else:\n raise Exception, \"format '%s' not recognized\" % format\n data.close()\n\n## def randomly_resolve(n):\n## assert len(n.children)>2\n\n## def leaf_mrcas(root):\n## from itertools import product, izip, tee\n## from collections import OrderedDict\n## from numpy import empty\n## mrca = OrderedDict()\n## def pairwise(iterable, tee=tee, izip=izip):\n## a, b = tee(iterable)\n## next(b, None)\n## return izip(a, b)\n## def f(n):\n## if n.isleaf:\n## od = OrderedDict(); od[n] = n.length\n## return od\n## d = [ f(c) for c in n.children ]\n## for i, j in pairwise(xrange(len(d))):\n## di = d[i]; dj =d[j]\n## for ni, niv in di.iteritems():\n## for nj, njv in dj.iteritems():\n## mrca[(ni,nj)] = n\n## d[j].update(di)\n## return d[j]\n## f(root)\n## return mrca\n\ndef C(leaves, internals):\n from scipy.sparse import lil_matrix\n m = lil_matrix((len(internals), len(leaves)))\n for lf in leaves:\n v = lf.length if lf.length is not None else 1\n for n in lf.rootpath():\n m[n.ii,lf.li] = v\n v += n.length if n.length is not None else 1\n return m.tocsc()\n", "ivy/treebase.py": "\"\"\"\nFunctions to get trees and character data from treebase\n\"\"\"\n\nfrom urllib2 import urlopen\nfrom lxml import etree\nfrom collections import defaultdict\nfrom storage import Storage\nimport sys, re\n\n# \"http://purl.org/phylo/treebase/phylows/study/TB2:S11152\"\n\nTREEBASE_WEBSERVICE = 
\"http://purl.org/phylo/treebase/phylows\"\nNEXML_NAMESPACE = \"http://www.nexml.org/2009\"\nNEXML = \"{%s}\" % NEXML_NAMESPACE\nUNIPROT = \"http://purl.uniprot.org/taxonomy/\"\nNAMEBANK = (\"http://www.ubio.org/authority/metadata.php?\"\n \"lsid=urn:lsid:ubio.org:namebank:\")\n\nROW_SEGMENTS = (\"http://treebase.org/treebase-web/search/study/\"\n \"rowSegmentsTSV.html?matrixid=\")\n\nMETA_DATATYPE = {\n \"xsd:long\": int,\n \"xsd:integer\": int,\n \"xsd:string\": str\n }\n\nAMBIG_RE = re.compile(r'([{][a-zA-Z]+[}])')\n\ndef fetch_study(study_id, format=\"nexml\"):\n \"\"\"\n Get a study from treebase in one of various formats\n\n Args:\n study_id (str): The id of the study\n format (str): One of [\"rdf\", \"html\", \"nexml\", \"nexus\"]\n Returns:\n Str representing a nexus file (if format = \"nexus\")\n\n OR\n\n An lxml etree object\n \"\"\"\n try: study_id = \"S%s\" % int(study_id)\n except ValueError: pass\n\n # format is one of [\"rdf\", \"html\", \"nexml\", \"nexus\"]\n url = \"%s/study/TB2:%s?format=%s\" % (TREEBASE_WEBSERVICE, study_id, format)\n if format==\"nexus\":\n return urlopen(url).read()\n else:\n return etree.parse(url)\n\ndef parse_chars(e, otus):\n v = []\n for chars in e.findall(NEXML+\"characters\"):\n c = Storage(chars.attrib)\n c.states = parse_states(chars)\n c.meta = Storage()\n for meta in chars.findall(NEXML+\"meta\"):\n a = meta.attrib\n if a.get(\"content\"):\n value = META_DATATYPE[a[\"datatype\"]](a[\"content\"])\n c.meta[a[\"property\"]] = value\n c.matrices = []\n for matrix in chars.findall(NEXML+\"matrix\"):\n m = Storage()\n m.rows = []\n for row in matrix.findall(NEXML+\"row\"):\n r = Storage(row.attrib)\n r.otu = otus[r.otu]\n s = row.findall(NEXML+\"seq\")[0].text\n substrs = []\n for ss in AMBIG_RE.split(s):\n if ss.startswith(\"{\"):\n key = frozenset(ss[1:-1])\n val = c.states.states2symb.get(key)\n if key and not val:\n sys.stderr.write(\"missing ambig symbol for %s\\n\" %\n \"\".join(sorted(key)))\n ss = val or \"?\"\n 
substrs.append(ss)\n s = \"\".join(substrs)\n r.seq = s\n m.rows.append(r)\n c.matrices.append(m)\n v.append(c)\n return v\n\ndef parse_trees(e, otus):\n \"\"\"\n Get trees from an etree object\n\n Args:\n e: A nexml document parsed by etree\n otus: OTUs returned by parse_otus\n Returns:\n list: A list of ivy Storage objects each\n containing every node of a tree.\n \"\"\"\n from tree import Node\n v = []\n for tb in e.findall(NEXML+\"trees\"):\n for te in tb.findall(NEXML+\"tree\"):\n t = Storage()\n t.attrib = Storage(te.attrib)\n t.nodes = {}\n for n in te.findall(NEXML+\"node\"):\n node = Node()\n if n.attrib.get(\"otu\"):\n node.isleaf = True\n node.otu = otus[n.attrib[\"otu\"]]\n node.label = node.otu.label\n t.nodes[n.attrib[\"id\"]] = node\n for edge in te.findall(NEXML+\"edge\"):\n d = edge.attrib\n n = t.nodes[d[\"target\"]]\n p = t.nodes[d[\"source\"]]\n length = d.get(\"length\")\n if length:\n n.length = float(length)\n p.add_child(n)\n r = [ n for n in t.nodes.values() if not n.parent ]\n assert len(r)==1\n r = r[0]\n r.isroot = True\n for i, n in enumerate(r): n.id = i+1\n t.root = r\n v.append(t)\n return v\n\ndef parse_otus(e):\n \"\"\"\n Get OTUs from an etree object\n\n Args:\n e: A nexml document parsed by etree\n Returns:\n dict: A dict mapping keys to OTUs contained in ivy Storage objects\n \"\"\"\n v = {}\n for otus in e.findall(NEXML+\"otus\"):\n for x in otus.findall(NEXML+\"otu\"):\n otu = Storage()\n otu.id = x.attrib[\"id\"]\n otu.label = x.attrib[\"label\"]\n for meta in x.iterchildren():\n d = meta.attrib\n p = d.get(\"property\")\n if p and p == \"tb:identifier.taxon\":\n otu.tb_taxid = d[\"content\"]\n elif p and p == \"tb:identifier.taxonVariant\":\n otu.tb_taxid_variant = d[\"content\"]\n h = d.get(\"href\")\n if h and h.startswith(NAMEBANK):\n otu.namebank_id = int(h.replace(NAMEBANK, \"\"))\n elif h and h.startswith(UNIPROT):\n otu.ncbi_taxid = int(h.replace(UNIPROT, \"\"))\n v[otu.id] = otu\n return v\n\ndef parse_nexml(doc):\n 
\"\"\"\n Parse an etree ElementTree\n\n Args:\n doc: An etree ElementTree or a file that can be parsed into\n an etree ElementTree with etree.parse\n Returns:\n An ivy Storage object containing all the information from the\n nexml file: Characters, metadata, OTUs, and trees.\n \"\"\"\n if not isinstance(doc, (etree._ElementTree, etree._Element)):\n doc = etree.parse(doc)\n meta = {}\n for child in doc.findall(NEXML+\"meta\"):\n if \"content\" in child.attrib:\n d = child.attrib\n key = d[\"property\"]\n val = META_DATATYPE[d[\"datatype\"]](d[\"content\"])\n if (key in meta) and val:\n if isinstance(meta[key], list):\n meta[key].append(val)\n else:\n meta[key] = [meta[key], val]\n else:\n meta[key] = val\n\n otus = parse_otus(doc)\n\n return Storage(meta = meta,\n otus = otus,\n chars = parse_chars(doc, otus),\n trees = parse_trees(doc, otus))\n\ndef parse_states(e):\n \"\"\"e is a characters element\"\"\"\n f = e.findall(NEXML+\"format\")[0]\n sts = f.findall(NEXML+\"states\")[0]\n states2symb = {}\n symb2states = {}\n id2symb = {}\n for child in sts.iterchildren():\n t = child.tag\n if t == NEXML+\"state\":\n k = child.attrib[\"id\"]\n v = child.attrib[\"symbol\"]\n id2symb[k] = v\n states2symb[v] = v\n symb2states[v] = v\n elif t == NEXML+\"uncertain_state_set\":\n k = child.attrib[\"id\"]\n v = child.attrib[\"symbol\"]\n id2symb[k] = v\n memberstates = []\n for memb in child.findall(NEXML+\"member\"):\n sid = memb.attrib[\"state\"]\n symb = id2symb[sid]\n for x in symb2states[symb]: memberstates.append(x)\n memberstates = frozenset(memberstates)\n symb2states[v] = memberstates\n states2symb[memberstates] = v\n return Storage(states2symb=states2symb,\n symb2states=symb2states,\n id2symb=id2symb)\n\ndef parse_charsets(study_id):\n from cStringIO import StringIO\n nx = StringIO(fetch_study(study_id, 'nexus'))\n d = {}\n for line in nx.readlines():\n if line.strip().startswith(\"CHARSET \"):\n v = line.strip().split()\n label = v[1]\n first, last = map(int, 
line.split()[-1][:-1].split(\"-\"))\n d[label] = (first-1, last-1)\n return d\n\nif __name__ == \"__main__\":\n import sys\n from pprint import pprint\n e = fetch_study('S11152', 'nexus')\n #print e\n #e.write(sys.stdout, pretty_print=True)\n\n ## e = etree.parse('/tmp/tmp.xml')\n ## x = parse_nexml(e)\n ## pprint(x)\n", "ivy/vis/alignment.py": "\"\"\"\ninteractive viewers for trees, etc. using matplotlib\n\"\"\"\nfrom collections import defaultdict\nfrom ..storage import Storage\nfrom .. import align, sequtil\nimport matplotlib, numpy, types\nimport matplotlib.pyplot as pyplot\nfrom matplotlib.figure import SubplotParams, Figure\nfrom matplotlib.axes import Axes, subplot_class_factory\nfrom matplotlib.patches import Rectangle\nfrom matplotlib.collections import PatchCollection\nfrom matplotlib.widgets import RectangleSelector\nfrom matplotlib.transforms import Bbox, offset_copy, IdentityTransform\nfrom matplotlib import colors as mpl_colors\nfrom matplotlib.ticker import MaxNLocator, FuncFormatter, NullLocator\nfrom mpl_toolkits.axes_grid1.inset_locator import inset_axes\nfrom Bio.Align import MultipleSeqAlignment\n\nmatplotlib.rcParams['path.simplify'] = False\n\nclass UpdatingRect(Rectangle):\n def __call__(self, p):\n self.set_bounds(*p.viewLim.bounds)\n p.figure.canvas.draw_idle()\n\nclass AlignmentFigure:\n def __init__(self, aln, name=None, div=0.25, overview=True):\n if isinstance(aln, MultipleSeqAlignment):\n self.aln = aln\n else:\n self.aln = align.read(aln)\n self.name = name\n self.div_value = div\n pars = SubplotParams(\n left=0.2, right=1, bottom=0.05, top=1, wspace=0.01\n )\n fig = pyplot.figure(subplotpars=pars, facecolor=\"white\")\n self.figure = fig\n self.initialize_subplots(overview)\n self.show()\n self.connect_events()\n \n def initialize_subplots(self, overview=False):\n ## p = AlignmentPlot(self.figure, 212, aln=self.aln)\n p = AlignmentPlot(self.figure, 111, aln=self.aln, app=self)\n self.detail = self.figure.add_subplot(p)\n 
self.detail.plot_aln()\n if overview:\n self.overview = inset_axes(\n self.detail, width=\"30%\", height=\"20%\", loc=1\n )\n self.overview.xaxis.set_major_locator(NullLocator())\n self.overview.yaxis.set_major_locator(NullLocator())\n self.overview.imshow(\n self.detail.array, interpolation='nearest', aspect='auto',\n origin='lower'\n )\n rect = UpdatingRect(\n [0,0], 0, 0, facecolor='black', edgecolor='cyan', alpha=0.5\n )\n self.overview.zoomrect = rect\n rect.target = self.detail\n self.detail.callbacks.connect('xlim_changed', rect)\n self.detail.callbacks.connect('ylim_changed', rect)\n self.overview.add_patch(rect)\n rect(self.overview)\n\n else:\n self.overview = None\n \n def show(self):\n self.figure.show()\n\n def connect_events(self):\n mpl_connect = self.figure.canvas.mpl_connect\n mpl_connect(\"button_press_event\", self.onclick)\n mpl_connect(\"button_release_event\", self.onbuttonrelease)\n mpl_connect(\"scroll_event\", self.onscroll)\n mpl_connect(\"pick_event\", self.onpick)\n mpl_connect(\"motion_notify_event\", self.ondrag)\n mpl_connect(\"key_press_event\", self.onkeypress)\n mpl_connect(\"axes_enter_event\", self.axes_enter)\n mpl_connect(\"axes_leave_event\", self.axes_leave)\n\n @staticmethod\n def axes_enter(e):\n ax = e.inaxes\n ax._active = True\n\n @staticmethod\n def axes_leave(e):\n ax = e.inaxes\n ax._active = False\n\n @staticmethod\n def onselect(estart, estop):\n b = estart.button\n ## print b, estart.key\n\n @staticmethod\n def onkeypress(e):\n ax = e.inaxes\n k = e.key\n if ax and k:\n if k == 't':\n ax.home()\n elif k == \"down\":\n ax.scroll(0, -0.1)\n elif k == \"up\":\n ax.scroll(0, 0.1)\n elif k == \"left\":\n ax.scroll(-0.1, 0)\n elif k == \"right\":\n ax.scroll(0.1, 0)\n elif k in '=+':\n ax.zoom(0.1,0.1)\n elif k == '-':\n ax.zoom(-0.1,-0.1)\n ax.figure.canvas.draw_idle()\n\n @staticmethod\n def ondrag(e):\n ax = e.inaxes\n button = e.button\n if ax and button == 2:\n if not ax.pan_start:\n ax.pan_start = (e.xdata, 
e.ydata)\n return\n x, y = ax.pan_start\n xdelta = x - e.xdata\n ydelta = y - e.ydata\n x0, x1 = ax.get_xlim()\n xspan = x1-x0\n y0, y1 = ax.get_ylim()\n yspan = y1 - y0\n midx = (x1+x0)*0.5\n midy = (y1+y0)*0.5\n ax.set_xlim(midx+xdelta-xspan*0.5, midx+xdelta+xspan*0.5)\n ax.set_ylim(midy+ydelta-yspan*0.5, midy+ydelta+yspan*0.5)\n ax.figure.canvas.draw_idle()\n\n @staticmethod\n def onbuttonrelease(e):\n ax = e.inaxes\n button = e.button\n if button == 2:\n ## print \"pan end\"\n ax.pan_start = None\n ax.figure.canvas.draw_idle()\n\n @staticmethod\n def onpick(e):\n ax = e.mouseevent.inaxes\n if ax:\n ax.picked(e)\n ax.figure.canvas.draw_idle()\n\n @staticmethod\n def onscroll(e):\n ax = e.inaxes\n if ax:\n b = e.button\n ## print b\n k = e.key\n if k == None and b ==\"up\":\n ax.zoom(0.1,0.1)\n elif k == None and b ==\"down\":\n ax.zoom(-0.1,-0.1)\n elif k == \"shift\" and b == \"up\":\n ax.zoom_cxy(0.1, 0, e.xdata, e.ydata)\n elif k == \"shift\" and b == \"down\":\n ax.zoom_cxy(-0.1, 0, e.xdata, e.ydata)\n elif k == \"control\" and b == \"up\":\n ax.zoom_cxy(0, 0.1, e.xdata, e.ydata)\n elif k == \"control\" and b == \"down\":\n ax.zoom_cxy(0, -0.1, e.xdata, e.ydata)\n elif k == \"d\" and b == \"up\":\n ax.scroll(0, 0.1)\n elif (k == \"d\" and b == \"down\"):\n ax.scroll(0, -0.1)\n elif k == \"c\" and b == \"up\":\n ax.scroll(-0.1, 0)\n elif k == \"c\" and b == \"down\":\n ax.scroll(0.1, 0)\n ax.figure.canvas.draw_idle()\n\n @staticmethod\n def onclick(e):\n ax = e.inaxes\n if (ax and e.button==1 and hasattr(ax, \"zoomrect\") and ax.zoomrect):\n # overview clicked; reposition zoomrect\n r = ax.zoomrect\n x = e.xdata\n y = e.ydata\n arr = ax.transData.inverted().transform(r.get_extents())\n xoff = (arr[1][0]-arr[0][0])*0.5\n yoff = (arr[1][1]-arr[0][1])*0.5\n r.target.set_xlim(x-xoff,x+xoff)\n r.target.set_ylim(y-yoff,y+yoff)\n r(r.target)\n ax.figure.canvas.draw_idle()\n\n elif ax and e.button==2:\n ## print \"pan start\", (e.xdata, e.ydata)\n ax.pan_start = 
(e.xdata, e.ydata)\n ax.figure.canvas.draw_idle()\n\n elif ax and hasattr(ax, \"aln\") and ax.aln:\n x = int(e.xdata+0.5); y = int(e.ydata+0.5)\n aln = ax.aln\n if (0 <= x <= ax.nchar) and (0 <= y <= ax.ntax):\n seq = aln[y]; char = seq[x]\n if char not in '-?':\n v = sequtil.gapidx(seq)\n i = (v[1]==x).nonzero()[0][0]\n print (\"%s: row %s, site %s: '%s', seqpos %s\"\n % (seq.id, y, x, char, i))\n else:\n print \"%s: row %s, site %s: '%s'\" % (seq.id, y, x, char)\n\n def zoom(self, factor=0.1):\n \"Zoom both axes by *factor* (relative display size).\"\n self.detail.zoom(factor, factor)\n self.figure.canvas.draw_idle()\n\n def __get_selection(self):\n return self.detail.extract_selected()\n selected = property(__get_selection)\n \nclass Alignment(Axes):\n \"\"\"\n matplotlib.axes.Axes subclass for rendering sequence alignments.\n \"\"\"\n def __init__(self, fig, rect, *args, **kwargs):\n self.aln = kwargs.pop(\"aln\")\n nrows = len(self.aln)\n ncols = self.aln.get_alignment_length()\n self.alnidx = numpy.arange(ncols)\n self.app = kwargs.pop(\"app\", None)\n self.showy = kwargs.pop('showy', True)\n Axes.__init__(self, fig, rect, *args, **kwargs)\n rgb = mpl_colors.colorConverter.to_rgb\n gray = rgb('gray')\n d = defaultdict(lambda:gray)\n d[\"A\"] = rgb(\"red\")\n d[\"a\"] = rgb(\"red\")\n d[\"C\"] = rgb(\"blue\")\n d[\"c\"] = rgb(\"blue\")\n d[\"G\"] = rgb(\"green\")\n d[\"g\"] = rgb(\"green\")\n d[\"T\"] = rgb(\"yellow\")\n d[\"t\"] = rgb(\"yellow\")\n self.cmap = d\n self.selector = RectangleSelector(\n self, self.select_rectangle, useblit=True\n )\n def f(e):\n if e.button != 1: return True\n else: return RectangleSelector.ignore(self.selector, e)\n self.selector.ignore = f\n self.selected_rectangle = Rectangle(\n [0,0],0,0, facecolor='white', edgecolor='cyan', alpha=0.3\n )\n self.add_patch(self.selected_rectangle)\n self.highlight_find_collection = None\n\n def plot_aln(self):\n cmap = self.cmap\n self.ntax = len(self.aln); self.nchar = 
self.aln.get_alignment_length()\n a = numpy.array([ [ cmap[base] for base in x.seq ]\n for x in self.aln ])\n self.array = a\n self.imshow(a, interpolation='nearest', aspect='auto', origin='lower')\n y = [ i+0.5 for i in xrange(self.ntax) ]\n labels = [ x.id for x in self.aln ]\n ## locator.bin_boundaries(1,ntax)\n ## locator.view_limits(1,ntax)\n if self.showy:\n locator = MaxNLocator(nbins=50, integer=True)\n self.yaxis.set_major_locator(locator)\n def fmt(x, pos=None):\n if x<0: return \"\"\n try: return labels[int(round(x))]\n except: pass\n return \"\"\n self.yaxis.set_major_formatter(FuncFormatter(fmt))\n else:\n self.yaxis.set_major_locator(NullLocator())\n \n return self\n\n def select_rectangle(self, e0, e1):\n x0, x1 = map(int, sorted((e0.xdata+0.5, e1.xdata+0.5)))\n y0, y1 = map(int, sorted((e0.ydata+0.5, e1.ydata+0.5)))\n self.selected_chars = (x0, x1)\n self.selected_taxa = (y0, y1)\n self.selected_rectangle.set_bounds(x0-0.5,y0-0.5,x1-x0+1,y1-y0+1)\n self.app.figure.canvas.draw_idle()\n\n def highlight_find(self, substr):\n if not substr:\n if self.highlight_find_collection:\n self.highlight_find_collection.remove()\n self.highlight_find_collection = None\n return\n \n N = len(substr)\n v = []\n for y, x in align.find(self.aln, substr):\n r = Rectangle(\n [x-0.5,y-0.5], N, 1,\n facecolor='cyan', edgecolor='cyan', alpha=0.7\n )\n v.append(r)\n if self.highlight_find_collection:\n self.highlight_find_collection.remove()\n c = PatchCollection(v, True)\n self.highlight_find_collection = self.add_collection(c)\n self.app.figure.canvas.draw_idle()\n\n def extract_selected(self):\n r0, r1 = self.selected_taxa\n c0, c1 = self.selected_chars\n return self.aln[r0:r1+1,c0:c1+1]\n\n def zoom_cxy(self, x=0.1, y=0.1, cx=None, cy=None):\n \"\"\"\n Zoom the x and y axes in by the specified proportion of the\n current view, with a fixed data point (cx, cy)\n \"\"\"\n transform = self.transData.inverted().transform\n xlim = self.get_xlim(); xmid = sum(xlim)*0.5\n ylim 
= self.get_ylim(); ymid = sum(ylim)*0.5\n bb = self.get_window_extent()\n bbx = bb.expanded(1.0-x,1.0-y)\n points = transform(bbx.get_points())\n x0, x1 = points[:,0]; y0, y1 = points[:,1]\n deltax = xmid-x0; deltay = ymid-y0\n cx = cx or xmid; cy = cy or ymid\n xoff = (cx-xmid)*x\n self.set_xlim(xmid-deltax+xoff, xmid+deltax+xoff)\n yoff = (cy-ymid)*y\n self.set_ylim(ymid-deltay+yoff, ymid+deltay+yoff)\n\n def zoom(self, x=0.1, y=0.1, cx=None, cy=None):\n \"\"\"\n Zoom the x and y axes in by the specified proportion of the\n current view.\n \"\"\"\n # get the function to convert display coordinates to data\n # coordinates\n transform = self.transData.inverted().transform\n xlim = self.get_xlim()\n ylim = self.get_ylim()\n bb = self.get_window_extent()\n bbx = bb.expanded(1.0-x,1.0-y)\n points = transform(bbx.get_points())\n x0, x1 = points[:,0]; y0, y1 = points[:,1]\n deltax = x0 - xlim[0]; deltay = y0 - ylim[0]\n self.set_xlim(xlim[0]+deltax, xlim[1]-deltax)\n self.set_ylim(ylim[0]+deltay, ylim[1]-deltay)\n\n def center_y(self, y):\n ymin, ymax = self.get_ylim()\n yoff = (ymax - ymin) * 0.5\n self.set_ylim(y-yoff, y+yoff)\n\n def center_x(self, x, offset=0.3):\n xmin, xmax = self.get_xlim()\n xspan = xmax - xmin\n xoff = xspan*0.5 + xspan*offset\n self.set_xlim(x-xoff, x+xoff)\n\n def scroll(self, x, y):\n x0, x1 = self.get_xlim()\n y0, y1 = self.get_ylim()\n xd = (x1-x0)*x\n yd = (y1-y0)*y\n self.set_xlim(x0+xd, x1+xd)\n self.set_ylim(y0+yd, y1+yd)\n\n def home(self):\n self.set_xlim(0, self.nchar)\n self.set_ylim(self.ntax, 0)\n\nAlignmentPlot = subplot_class_factory(Alignment)\n\n", "ivy/vis/hardcopy.py": "import os, matplotlib\nfrom matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas\nfrom matplotlib.figure import Figure\nimport tree\nfrom axes_utils import adjust_limits\nimport tempfile\n\n## class TreeFigure:\n## def __init__(self):\n## pass\n\nmatplotlib.rcParams[\"xtick.direction\"] = \"out\"\n\nclass TreeFigure:\n def __init__(self, 
root, relwidth=0.5, leafpad=1.5, name=None,\n support=70.0, scaled=True, mark_named=True,\n leaf_fontsize=10, branch_fontsize=10,\n branch_width=1, branch_color=\"black\",\n highlight_support=True,\n branchlabels=True, leaflabels=True, decorators=[],\n xoff=0, yoff=0,\n xlim=None, ylim=None,\n height=None, width=None):\n self.root = root\n self.relwidth = relwidth\n self.leafpad = leafpad\n self.name = name\n self.support = support\n self.scaled = scaled\n self.mark_named = mark_named\n self.leaf_fontsize = leaf_fontsize\n self.branch_fontsize = branch_fontsize\n self.branch_width = branch_width\n self.branch_color = branch_color\n self.highlight_support = highlight_support\n self.branchlabels = branchlabels\n self.leaflabels = leaflabels\n self.decorators = decorators\n self.xoff = xoff\n self.yoff = yoff\n\n nleaves = len(root.leaves())\n self.dpi = 72.0\n h = height or (nleaves*self.leaf_fontsize*self.leafpad)/self.dpi\n self.height = h\n self.width = width or self.height*self.relwidth\n ## p = min(self.width, self.height)*0.1\n ## self.height += p\n ## self.width += p\n self.figure = Figure(figsize=(self.width, self.height), dpi=self.dpi)\n self.canvas = FigureCanvas(self.figure)\n self.axes = self.figure.add_axes(\n tree.TreePlot(self.figure, 1,1,1,\n support=self.support,\n scaled=self.scaled,\n mark_named=self.mark_named,\n leaf_fontsize=self.leaf_fontsize,\n branch_fontsize=self.branch_fontsize,\n branch_width=self.branch_width,\n branch_color=self.branch_color,\n highlight_support=self.highlight_support,\n branchlabels=self.branchlabels,\n leaflabels=self.leaflabels,\n interactive=False,\n decorators=self.decorators,\n xoff=self.xoff, yoff=self.yoff,\n name=self.name).plot_tree(self.root)\n )\n self.axes.spines[\"top\"].set_visible(False)\n self.axes.spines[\"left\"].set_visible(False)\n self.axes.spines[\"right\"].set_visible(False)\n self.axes.spines[\"bottom\"].set_smart_bounds(True)\n self.axes.xaxis.set_ticks_position(\"bottom\")\n\n for v in 
self.axes.node2label.values():\n v.set_visible(True)\n\n ## for k, v in self.decorators:\n ## func, args, kwargs = v\n ## func(self.axes, *args, **kwargs)\n\n self.canvas.draw()\n ## self.axes.home()\n ## adjust_limits(self.axes)\n self.axes.set_position([0.05,0.05,0.95,0.95])\n\n @property\n def detail(self):\n return self.axes\n \n def savefig(self, fname):\n root, ext = os.path.splitext(fname)\n buf = tempfile.TemporaryFile()\n for i in range(3):\n self.figure.savefig(buf, format=ext[1:].lower())\n self.home()\n buf.seek(0)\n buf.close()\n self.figure.savefig(fname)\n\n def set_relative_width(self, relwidth):\n w, h = self.figure.get_size_inches()\n self.figure.set_figwidth(h*relwidth)\n\n def autoheight(self):\n \"adjust figure height to show all leaf labels\"\n nleaves = len(self.root.leaves())\n h = (nleaves*self.leaf_fontsize*self.leafpad)/self.dpi\n self.height = h\n self.figure.set_size_inches(self.width, self.height)\n self.axes.set_ylim(-2, nleaves+2)\n\n def home(self):\n self.axes.home()\n", "ivy/vis/symbols.py": "\"\"\"\nConvenience functions for drawing shapes on TreePlots.\n\"\"\"\ntry:\n import Image\nexcept ImportError:\n from PIL import Image\nfrom numpy import pi\nfrom matplotlib.collections import RegularPolyCollection, CircleCollection\nfrom matplotlib.transforms import offset_copy\nfrom matplotlib.patches import Rectangle, Wedge, Circle, PathPatch\nfrom matplotlib.offsetbox import DrawingArea\nfrom itertools import izip_longest\nfrom matplotlib.axes import Axes\nfrom numpy import array\nfrom matplotlib.path import Path\n\n\ntry:\n from matplotlib.offsetbox import OffsetImage, AnnotationBbox\nexcept ImportError:\n pass\nfrom ..tree import Node\nimport colors as _colors\n\ndef _xy(plot, p):\n if isinstance(p, Node):\n c = plot.n2c[p]\n p = (c.x, c.y)\n elif isinstance(p, (list, tuple)):\n p = [ _xy(plot, x) for x in p ]\n else:\n pass\n return p\n\n\n\ndef image(plot, p, imgfile,\n maxdim=100, border=0,\n xoff=4, yoff=4,\n halign=0.0, 
valign=0.5,\n xycoords='data',\n boxcoords=('offset points')):\n \"\"\"\n Add images to plot\n\n Args:\n plot (Tree): A Tree plot instance\n p (Node): A node object\n imgfile (str): A path to an image\n maxdim (float): Maximum dimension of image. Optional,\n defaults to 100.\n border: RR: What does border do? -CZ\n xoff, yoff (float): X and Y offset. Optional, defaults to 4\n halign, valign (float): Horizontal and vertical alignment within\n box. Optional, defaults to 0.0 and 0.5, respectively.\n\n \"\"\"\n if xycoords == \"label\":\n xycoords = plot.node2label[p]\n x, y = (1, 0.5)\n else:\n x, y = _xy(plot, p)\n img = Image.open(imgfile)\n if max(img.size) > maxdim:\n img.thumbnail((maxdim, maxdim))\n imgbox = OffsetImage(img)\n abox = AnnotationBbox(imgbox, (x, y),\n xybox=(xoff, yoff),\n xycoords=xycoords,\n box_alignment=(halign,valign),\n pad=0.0,\n boxcoords=boxcoords)\n plot.add_artist(abox)\n plot.figure.canvas.draw_idle()\n\ndef images(plot, p, imgfiles,\n maxdim=100, border=0,\n xoff=4, yoff=4,\n halign=0.0, valign=0.5,\n xycoords='data', boxcoords=('offset points')):\n \"\"\"\n Add many images to plot at once\n\n Args:\n Plot (Tree): A Tree plot instance\n p (list): A list of node objects\n imgfile (list): A list of strs containing paths to image files.\n Must be the same length as p.\n maxdim (float): Maximum dimension of image. Optional,\n defaults to 100.\n border: RR: What does border do? -CZ\n xoff, yoff (float): X and Y offset. Optional, defaults to 4\n halign, valign (float): Horizontal and vertical alignment within\n box. 
Optional, defaults to 0.0 and 0.5, respectively.\n\n \"\"\"\n for x, f in zip(p, imgfiles):\n image(plot, x, f, maxdim, border, xoff, yoff, halign, valign,\n xycoords, boxcoords)\n\ndef pie(plot, p, values, colors=None, size=16, norm=True,\n xoff=0, yoff=0,\n halign=0.5, valign=0.5,\n xycoords='data', boxcoords=('offset points')):\n \"\"\"\n Draw a pie chart\n\n Args:\n plot (Tree): A Tree plot instance\n p (Node): A Node object\n values (list): A list of floats.\n colors (list): A list of strings to pull colors from. Optional.\n size (float): Diameter of the pie chart\n norm (bool): Whether or not to normalize the values so they\n add up to 360\n xoff, yoff (float): X and Y offset. Optional, defaults to 0\n halign, valign (float): Horizontal and vertical alignment within\n box. Optional, defaults to 0.5\n\n \"\"\"\n x, y = _xy(plot, p)\n da = DrawingArea(size, size); r = size*0.5; center = (r,r)\n x0 = 0\n S = 360.0\n if norm: S = 360.0/sum(values)\n if not colors:\n c = _colors.tango()\n colors = [ c.next() for v in values ]\n for i, v in enumerate(values):\n theta = v*S\n if v: da.add_artist(Wedge(center, r, x0, x0+theta,\n fc=colors[i], ec='none'))\n x0 += theta\n box = AnnotationBbox(da, (x,y), pad=0, frameon=False,\n xybox=(xoff, yoff),\n xycoords=xycoords,\n box_alignment=(halign,valign),\n boxcoords=boxcoords)\n plot.add_artist(box)\n plot.figure.canvas.draw_idle()\n return box\n\ndef hbar(plot, p, values, colors=None, height=16,\n xoff=0, yoff=0,\n halign=1, valign=0.5,\n xycoords='data', boxcoords=('offset points')):\n x, y = _xy(plot, p)\n h = height; w = sum(values) * height#; yoff=h*0.5\n da = DrawingArea(w, h)\n x0 = -sum(values)\n if not colors:\n c = _colors.tango()\n colors = [ c.next() for v in values ]\n for i, v in enumerate(values):\n if v: da.add_artist(Rectangle((x0,0), v*h, h, fc=colors[i], ec='none'))\n x0 += v*h\n box = AnnotationBbox(da, (x,y), pad=0, frameon=False,\n xybox=(xoff, yoff),\n xycoords=xycoords,\n 
box_alignment=(halign,valign),\n boxcoords=boxcoords)\n plot.add_artist(box)\n plot.figure.canvas.draw_idle()\n\ndef hbars(plot, p, values, colors=None, height=16,\n xoff=0, yoff=0,\n halign=1, valign=0.5,\n xycoords='data', boxcoords=('offset points')):\n for x, v in zip(p, values):\n hbar(plot, x, v, colors, height, xoff, yoff, halign, valign,\n xycoords, boxcoords)\n\ndef squares(plot, p, colors='r', size=15, xoff=0, yoff=0, alpha=1.0,\n zorder=1000):\n \"\"\"\n Draw a square at given node\n\n Args:\n plot (Tree): A Tree plot instance\n p: A node or list of nodes\n colors: Str or list of strs. Colors of squares to be drawn.\n Optional, defaults to 'r' (red)\n size (float): Size of the squares. Optional, defaults to 15\n xoff, yoff (float): Offset for x and y dimensions. Optional,\n defaults to 0.\n alpha (float): between 0 and 1. Alpha transparency of squares.\n Optional, defaults to 1 (fully opaque)\n zorder (int): The drawing order. Higher numbers appear on top\n of lower numbers. Optional, defaults to 1000.\n\n \"\"\"\n points = _xy(plot, p)\n trans = offset_copy(\n plot.transData, fig=plot.figure, x=xoff, y=yoff, units='points')\n\n col = RegularPolyCollection(\n numsides=4, rotation=pi*0.25, sizes=(size*size,),\n offsets=points, facecolors=colors, transOffset=trans,\n edgecolors='none', alpha=alpha, zorder=zorder\n )\n\n plot.add_collection(col)\n plot.figure.canvas.draw_idle()\n\ndef tipsquares(plot, p, colors='r', size=15, pad=2, edgepad=10):\n \"\"\"\n RR: Bug with this function. If you attempt to call it with a list as an\n argument for p, it will not only not work (expected) but it will also\n make it so that you can't interact with the tree figure (gives errors when\n you try to add symbols, select nodes, etc.) -CZ\n\n Add square after tip label, anchored to the side of the plot\n\n Args:\n plot (Tree): A Tree plot instance.\n p (Node): A Node object (Should be a leaf node).\n colors (str): olor of drawn square. 
Optional, defaults to 'r' (red)\n size (float): Size of square. Optional, defaults to 15\n pad: RR: I am unsure what this does. Does not seem to have visible\n effect when I change it. -CZ\n edgepad (float): Padding from square to edge of plot. Optional,\n defaults to 10.\n\n \"\"\"\n x, y = _xy(plot, p) # p is a single node or point in data coordinates\n n = len(colors)\n da = DrawingArea(size*n+pad*(n-1), size, 0, 0)\n sx = 0\n for c in colors:\n sq = Rectangle((sx,0), size, size, color=c)\n da.add_artist(sq)\n sx += size+pad\n box = AnnotationBbox(da, (x, y), xybox=(-edgepad,y),\n frameon=False,\n pad=0.0,\n xycoords='data',\n box_alignment=(1, 0.5),\n boxcoords=('axes points','data'))\n plot.add_artist(box)\n plot.figure.canvas.draw_idle()\n\n\ndef circles(plot, p, colors='g', size=15, xoff=0, yoff=0):\n \"\"\"\n Draw circles on plot\n\n Args:\n plot (Tree): A Tree plot instance\n p: A node object or list of Node objects\n colors: Str or list of strs. Colors of the circles. Optional,\n defaults to 'g' (green)\n size (float): Size of the circles. Optional, defaults to 15\n xoff, yoff (float): X and Y offset. Optional, defaults to 0.\n\n \"\"\"\n points = _xy(plot, p)\n trans = offset_copy(\n plot.transData, fig=plot.figure, x=xoff, y=yoff, units='points'\n )\n\n col = CircleCollection(\n sizes=(pi*size*size*0.25,),\n offsets=points, facecolors=colors, transOffset=trans,\n edgecolors='none'\n )\n\n plot.add_collection(col)\n plot.figure.canvas.draw_idle()\n return col\n\ndef legend(plot, colors, labels, shape='rectangle', loc='upper left', **kwargs):\n \"\"\"\n RR: the MPL legend function has changed since this function has been\n written. This function currently does not work. 
-CZ\n \"\"\"\n if shape == 'circle':\n shapes = [ Circle((0.5,0.5), radius=1, fc=c) for c in colors ]\n #shapes = [ CircleCollection([10],facecolors=[c]) for c in colors ]\n else:\n shapes = [ Rectangle((0,0),1,1,fc=c,ec='none') for c in colors ]\n\n return Axes.legend(plot, shapes, labels, loc=loc, **kwargs)\n\ndef leafspace_triangles(plot, color='black', rca=0.5):\n \"\"\"\n RR: Using this function on the primates tree (straight from the newick file)\n gives error: 'Node' object has no attribute 'leafspace'. How do you give\n nodes the leafspace attribute? -CZ\n rca = relative crown age\n \"\"\"\n leaves = plot.root.leaves()\n leafspace = [ float(x.leafspace) for x in leaves ]\n #leafspace = array(raw_leafspace)/(sum(raw_leafspace)/float(len(leaves)))\n pv = []\n for i, n in enumerate(leaves):\n if leafspace[i] > 0:\n p = plot.n2c[n]\n pp = plot.n2c[n.parent]\n spc = leafspace[i]\n yoff = spc/2.0\n x0 = pp.x + (p.x - pp.x)*rca\n verts = [(x0, p.y),\n (p.x, p.y-yoff),\n (p.x, p.y+yoff),\n (x0, p.y)]\n codes = [Path.MOVETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY]\n path = Path(verts, codes)\n patch = PathPatch(path, fc=color, lw=0)\n pv.append(plot.add_patch(patch))\n return pv\n\ndef text(plot, x, y, s, color='black', xoff=0, yoff=0, valign='center',\n halign='left', fontsize=10):\n \"\"\"\n Add text to the plot.\n\n Args:\n plot (Tree): A Tree plot instance\n x, y (float): x and y coordinates to place the text\n s (str): The text to write\n color (str): The color of the text. Optional, defaults to \"black\"\n xoff, yoff (float): x and y offset\n valign (str): Vertical alignment. Can be: 'center', 'top',\n 'bottom', or 'baseline'. Defaults to 'center'.\n halign (str): Horizontal alignment. Can be: 'center', 'right',\n or 'left'. Defaults to 'left'\n fontsize (float): Font size. 
Optional, defaults to 10\n\n \"\"\"\n txt = plot.annotate(\n s, xy=(x, y),\n xytext=(xoff, yoff),\n textcoords=\"offset points\",\n verticalalignment=valign,\n horizontalalignment=halign,\n fontsize=fontsize,\n clip_on=True,\n picker=True\n )\n txt.set_visible(True)\n return txt\n", "ivy/vis/tree.py": "\"\"\"\ninteractive viewers for trees, etc. using matplotlib\n\"\"\"\nimport sys, time, bisect, math, types, os, operator\nfrom collections import defaultdict\nfrom itertools import chain\nfrom pprint import pprint\nfrom .. import tree, bipart\nfrom ..layout import cartesian\nfrom ..storage import Storage\nfrom .. import pyperclip as clipboard\n#from ..nodecache import NodeCache\nimport matplotlib, numpy\nimport matplotlib.pyplot as pyplot\nfrom matplotlib.figure import SubplotParams, Figure\nfrom matplotlib.axes import Axes, subplot_class_factory\nfrom matplotlib.patches import PathPatch, Rectangle, Arc\nfrom matplotlib.path import Path\nfrom matplotlib.widgets import RectangleSelector\nfrom matplotlib.transforms import Bbox, offset_copy, IdentityTransform, \\\n Affine2D\nfrom matplotlib import cm as mpl_colormap\nfrom matplotlib import colors as mpl_colors\nfrom matplotlib.colorbar import Colorbar\nfrom matplotlib.collections import RegularPolyCollection, LineCollection, \\\n PatchCollection\nfrom matplotlib.lines import Line2D\ntry:\n from matplotlib.offsetbox import OffsetImage, AnnotationBbox\nexcept ImportError:\n pass\nfrom matplotlib._png import read_png\nfrom matplotlib.ticker import MaxNLocator, FuncFormatter, NullLocator\nfrom mpl_toolkits.axes_grid.anchored_artists import AnchoredText\nfrom mpl_toolkits.axes_grid1.inset_locator import inset_axes\nimport symbols, colors\nimport hardcopy as HC\ntry:\n import Image\nexcept ImportError:\n from PIL import Image\n\n#matplotlib.rcParams['path.simplify'] = False\n\n_tango = colors.tango()\nclass TreeFigure(object):\n \"\"\"\n Window for showing a single tree, optionally with split overview\n and detail panes.\n\n 
The navigation toolbar at the bottom is provided by matplotlib\n (http://matplotlib.sf.net/users/navigation_toolbar.html). Its\n pan/zoom button and zoom-rectangle button provide different modes\n of mouse interaction with the figure. When neither of these\n buttons are checked, the default mouse bindings are as follows:\n\n * button 1 drag: select nodes - retrieve by calling fig.selected\n * button 3 drag: pan view\n * scroll up/down: zoom in/out\n * scroll up/down with Control key: zoom y-axis\n * scroll up/down with Shift key: zoom x-axis\n * scroll up/down with 'd' key: pan view up/down\n * scroll up/down with 'e' key: pan view left/right\n * click on overview will center the detail pane on that region\n\n Default keybindings:\n\n * t: zoom out to full extent\n * +/-: zoom in/out\n\n Useful attributes and methods (assume an instance named *fig*):\n\n * fig.root - the root node (see [Node methods])\n * fig.highlight(s) - highlight and trace nodes with substring *s*\n * fig.zoom_clade(anc) - zoom to view node *anc* and all its descendants\n * fig.toggle_overview() - toggle visibility of the overview pane\n * fig.toggle_branchlabels() - ditto for branch labels\n * fig.toggle_leaflabels() - ditto for leaf labels\n * fig.decorate(func) - decorate the tree with a function (see\n :ref:`decorating TreeFigures `)\n \"\"\"\n def __init__(self, data, name=None, scaled=True, div=0.25,\n branchlabels=True, leaflabels=True, mark_named=True,\n highlight_support=True, xoff=0, yoff=0,\n overview=True, radial=False):\n self.overview = None\n self.overview_width = div\n self.dataplot = None\n self.dataplot_width = 0.25\n self.name = name\n self.scaled = scaled\n self.branchlabels = branchlabels\n self.leaflabels = leaflabels\n self.mark_named = mark_named\n self.xoff = xoff\n self.yoff = yoff\n self.radial = radial\n if radial:\n self.leaflabels = False\n self.highlighted = set()\n self.highlight_support = highlight_support\n if isinstance(data, tree.Node):\n root = data\n 
else:\n root = tree.read(data)\n self.root = root\n if not self.root:\n raise IOError, \"cannot coerce data into tree.Node\"\n self.name = self.name or root.treename\n pars = SubplotParams(\n left=0, right=1, bottom=0.05, top=1, wspace=0.01\n )\n fig = pyplot.figure(subplotpars=pars, facecolor=\"white\")\n connect_events(fig.canvas)\n self.figure = fig\n self.initialize_subplots(overview)\n self.home()\n\n def initialize_subplots(self, overview=True):\n if not self.radial:\n tp = TreePlot(self.figure, 1, 2, 2, app=self, name=self.name,\n scaled=self.scaled, branchlabels=self.branchlabels,\n highlight_support=self.highlight_support,\n leaflabels=self.leaflabels,\n mark_named=self.mark_named)\n detail = self.figure.add_subplot(tp)\n detail.set_root(self.root)\n detail.plot_tree()\n self.detail = detail\n tp = OverviewTreePlot(\n self.figure, 121, app=self, scaled=self.scaled,\n branchlabels=False, leaflabels=False,\n mark_named=self.mark_named,\n highlight_support=self.highlight_support,\n target=self.detail\n )\n ov = self.figure.add_subplot(tp)\n ov.set_root(self.root)\n ov.plot_tree()\n self.overview = ov\n if not overview:\n self.toggle_overview(False)\n self.set_positions()\n\n if self.detail.nleaves < 50:\n self.toggle_overview(False)\n else:\n tp = RadialTreePlot(\n self.figure, 111, app=self, name=self.name,\n scaled=self.scaled, branchlabels=self.branchlabels,\n highlight_support=self.highlight_support,\n leaflabels=self.leaflabels, mark_named=self.mark_named\n )\n ax2 = self.figure.add_subplot(tp)\n ax2.set_root(self.root)\n ax2.plot_tree()\n self.detail = ax2\n\n def __get_selected_nodes(self):\n return list(self.detail.selected_nodes)\n\n def __set_selected_nodes(self, nodes):\n self.detail.select_nodes(nodes)\n\n def __del_selected_nodes(self):\n self.detail.select_nodes(None)\n\n selected = property(__get_selected_nodes,\n __set_selected_nodes,\n __del_selected_nodes)\n\n ## def selected_nodes(self):\n ## return self.detail.selected_nodes\n\n 
@property\n def axes(self):\n return self.detail\n\n def add(self, data, name=None, support=70,\n branchlabels=False, leaflabels=True, mark_named=True):\n \"\"\"\n Add a new tree in a new window\n\n Args:\n data: A node object or tree file.\n name (str): Name of the plot. Defaults to None\n branchlabels (bool): Whether or not to draw branch labels.\n Defaults to False\n leaflabels (bool): Whether or not to draw leaf labels.\n Defaults to True\n \"\"\"\n newfig = MultiTreeFigure()\n ## newfig.add(self.root, name=self.name, support=self.support,\n ## branchlabels=self.branchlabels)\n newfig.add(data, name=name, support=support,\n branchlabels=branchlabels,\n leaflabels=leaflabels,\n mark_named=mark_named)\n return newfig\n\n def toggle_leaflabels(self):\n \"\"\"\n Toggle leaf labels and redraw tree\n \"\"\"\n self.leaflabels = not self.leaflabels\n self.detail.leaflabels = self.leaflabels\n self.redraw()\n\n def toggle_branchlabels(self):\n \"\"\"\n Toggle branch labels and redraw tree\n \"\"\"\n self.branchlabels = not self.branchlabels\n self.detail.branchlabels = self.branchlabels\n self.redraw()\n\n def toggle_overview(self, val=None):\n \"\"\"\n Toggle overview\n \"\"\"\n if val is None:\n if self.overview.get_visible():\n self.overview.set_visible(False)\n self.overview_width = 0.001\n else:\n self.overview.set_visible(True)\n self.overview_width = 0.25\n elif val:\n self.overview.set_visible(True)\n self.overview_width = val\n else:\n self.overview.set_visible(False)\n self.overview_width = 0.001\n self.set_positions()\n\n def set_scaled(self, scaled):\n \"\"\"\n RR: Using this method gives the error:\n redraw takes exactly 1 argument(2 given)-CZ\n Define whether or not the tree is scaled and redraw tree\n\n Args:\n scaled (bool): Whether or not the tree is scaled.\n \"\"\"\n for p in self.overview, self.detail:\n p.redraw(p.set_scaled(scaled))\n self.set_positions()\n\n def on_nodes_selected(self, treeplot):\n pass\n\n def picked(self, e):\n try:\n if 
e.mouseevent.button==1:\n s = e.artist.get_text()\n clipboard.copy(s)\n print s\n sys.stdout.flush()\n except:\n pass\n\n def ladderize(self, rev=False):\n \"\"\"\n Ladderize and redraw the tree\n \"\"\"\n self.root.ladderize(rev)\n self.redraw()\n\n def show(self):\n \"\"\"\n Plot the figure in a new window\n \"\"\"\n self.figure.show()\n\n def set_positions(self):\n ov = self.overview\n p = self.detail\n dp = self.dataplot\n height = 1.0-p.xoffset()\n if ov:\n box = [0, p.xoffset(), self.overview_width, height]\n ov.set_position(box)\n w = 1.0\n if ov:\n w -= self.overview_width\n if dp:\n w -= self.dataplot_width\n p.set_position([self.overview_width, p.xoffset(), w, height])\n if dp:\n box = [1.0-self.dataplot_width, p.xoffset(),\n self.dataplot_width, height]\n dp.set_position(box)\n self.figure.canvas.draw_idle()\n\n ## def div(self, v=0.3):\n ## assert 0 <= v < 1\n ## self.overview_width = v\n ## self.set_positions()\n ## self.figure.canvas.draw_idle()\n\n def add_dataplot(self):\n \"\"\"\n Add new plot to the side of existing plot\n \"\"\"\n np = 3 if self.overview else 2\n if self.dataplot:\n self.figure.delaxes(self.dataplot)\n self.dataplot = self.figure.add_subplot(1, np, np, sharey=self.detail)\n # left, bottom, width, height (proportions)\n dleft, dbottom, dwidth, dheight = self.detail.get_position().bounds\n # give the dataplot one-quarter the width of the detail axes\n w = dwidth * 0.25\n self.detail.set_position([dleft, dbottom, dwidth-w, dheight])\n self.dataplot.set_position([1-w, dbottom, w, dheight])\n self.dataplot.xaxis.set_visible(False)\n self.dataplot.yaxis.set_visible(False)\n for x in self.dataplot.spines.values():\n x.set_visible(False)\n self.figure.canvas.draw_idle()\n return self.dataplot\n\n def redraw(self):\n \"\"\"\n Replot the figure and overview\n \"\"\"\n self.detail.redraw()\n if self.overview: self.overview.redraw()\n self.highlight()\n self.set_positions()\n self.figure.canvas.draw_idle()\n\n def find(self, x):\n \"\"\"\n 
Find nodes\n\n Args:\n x (str): String to search\n Returns:\n list: A list of node objects found with the Node findall() method\n \"\"\"\n return self.root.findall(x)\n\n def hlines(self, nodes, width=5, color=\"red\", xoff=0, yoff=0):\n \"\"\"\n Highlight nodes\n\n Args:\n nodes (list): A list of node objects\n width (float): Width of highlighted lines. Defaults to 5\n color (str): Color of highlighted lines. Defaults to red\n xoff (float): Number of units to offset lines by. Defaults to 0\n yoff (float): Number of units to offset lines by. Defaults to 0\n \"\"\"\n self.overview.hlines(nodes, width=width, color=color,\n xoff=xoff, yoff=yoff)\n self.detail.hlines(nodes, width=width, color=color,\n xoff=xoff, yoff=yoff)\n\n def highlight(self, x=None, width=5, color=\"red\"):\n \"\"\"\n Highlight nodes\n\n Args:\n x: Str or list of Strs or Node or list of Nodes\n width (float): Width of highlighted lines. Defaults to 5\n color (str): Color of highlighted lines. Defaults to red\n \"\"\"\n if x:\n nodes = set()\n if type(x) in types.StringTypes:\n nodes = self.root.findall(x)\n elif isinstance(x, tree.Node):\n nodes = set(x)\n else:\n for n in x:\n if type(n) in types.StringTypes:\n found = self.root.findall(n)\n if found:\n nodes |= set(found)\n elif isinstance(n, tree.Node):\n nodes.add(n)\n\n self.highlighted = nodes\n else:\n self.highlighted = set()\n if self.overview:\n self.overview.highlight(self.highlighted, width=width, color=color)\n self.detail.highlight(self.highlighted, width=width, color=color)\n self.figure.canvas.draw_idle()\n\n def home(self):\n \"\"\"\n Return plot to initial size and location.\n \"\"\"\n if self.overview: self.overview.home()\n self.detail.home()\n\n def zoom_clade(self, x):\n \"\"\"\n Zoom to fit a node *x* and all its descendants in the view.\n\n Args:\n x: Node or str that matches the label of a node\n \"\"\"\n if not isinstance(x, tree.Node):\n x = self.root[x]\n self.detail.zoom_clade(x)\n\n def zoom(self, factor=0.1):\n 
\"\"\"Zoom both axes by *factor* (relative display size).\"\"\"\n self.detail.zoom(factor, factor)\n self.figure.canvas.draw_idle()\n\n def zx(self, factor=0.1):\n \"\"\"Zoom x axis by *factor*.\"\"\"\n self.detail.zoom(factor, 0)\n self.figure.canvas.draw_idle()\n\n def zy(self, factor=0.1):\n \"\"\"Zoom y axis by *factor*.\"\"\"\n self.detail.zoom(0, factor)\n self.figure.canvas.draw_idle()\n\n def decorate(self, func, *args, **kwargs):\n \"\"\"\n Decorate the tree.\n\n Args:\n func (function): A function that takes a TreePlot instance as the\n first parameter, and *args* and *kwargs* as additional\n parameters. It adds boxes, circles, etc to the TreePlot.\n\n Notes:\n If *kwargs* contains the key-value pair ('store', *name*),\n then the function is stored as *name* and re-called every time\n the TreePlot is redrawn, i.e., the decoration is persistent.\n Use ``rmdec(name)`` to remove the decorator from the treeplot.\n \"\"\"\n self.detail.decorate(func, *args, **kwargs)\n\n def rmdec(self, name):\n \"Remove the decoration 'name'.\"\n self.detail.rmdec(name)\n ## if name in self.detail.decorators:\n ## del self.detail.decorators[name]\n\n def cbar(self, node, width=6, color='blue', mrca = True):\n pass\n # self.axes.cbar(nodes = node, width = width, color = color, mrca = mrca)\n\n def unclutter(self, *args):\n self.detail.unclutter()\n\n def trace_branches(self, nodes, width=4, color=\"blue\"):\n \"\"\"\n RR: What is the difference between this and highlight? 
-CZ\n \"\"\"\n for p in self.overview, self.detail:\n p.trace_branches(nodes, width, color)\n\n def plot_continuous(self, *args, **kwargs):\n self.detail.plot_continuous(*args, **kwargs)\n\n def hardcopy(self, fname=None, relwidth=None, leafpad=1.5):\n if not relwidth:\n bbox = self.detail.get_tightbbox(self.figure.canvas.get_renderer())\n relwidth = bbox.width/bbox.height\n f = self.detail.hardcopy(\n relwidth=relwidth,\n leafpad=leafpad\n )\n f.axes.home()\n #f.axes.set_xlim(*self.detail.get_xlim())\n #f.axes.set_ylim(*self.detail.get_ylim())\n if fname:\n f.savefig(fname)\n return f\n\n def select_nodes(self, nodes=None):\n \"\"\"\n Select nodes on the plot\n\n Args:\n nodes: A node or list of ndoes\n Notes:\n If only one node is given, all of the node's ancestors are\n also selected. If a list of nodes is given (even if it has only\n one node), only the given node(s) are selected.\n \"\"\"\n self.detail.select_nodes(nodes)\n\n def decorate(self, func, *args, **kwargs): # RR: is this repeated from above? -CZ\n self.detail.decorate(func, *args, **kwargs)\n\n ## def dataplot(self):\n ## ax = self.figure.add_subplot(133, sharey=self.detail)\n ## ax.yaxis.set_visible(False)\n ## self.dataplot = ax\n ## return ax\n\n def attach_alignment(self, aln, overview=True):\n \"leaf labels expected to be sequence ids\"\n from Bio.Align import MultipleSeqAlignment\n from Bio.Seq import Seq\n from Bio.SeqRecord import SeqRecord\n from Bio.Alphabet import IUPAC\n from alignment import AlignmentFigure, AlignmentPlot\n if not isinstance(aln, MultipleSeqAlignment):\n from .. 
import align\n aln = align.read(aln)\n d = dict([ (x.id,x) for x in aln ])\n emptyseq = Seq('-'*aln.get_alignment_length(),\n alphabet=IUPAC.ambiguous_dna)\n aln = MultipleSeqAlignment(\n [ d.get(x.label) or SeqRecord(emptyseq, id=x.label)\n for x in self.root.leaves() ]\n )\n self.aln = aln\n p = AlignmentPlot(self.figure, 133, aln=aln, app=self,\n sharey=self.detail, showy=False)\n self.alnplot = Storage()\n self.alnplot.detail = self.figure.add_subplot(p)\n detail = self.alnplot.detail\n detail.plot_aln()\n if overview:\n self.alnplot.overview = inset_axes(\n detail, width=\"30%\", height=\"20%\", loc=1\n )\n overview = self.alnplot.overview\n overview.xaxis.set_major_locator(NullLocator())\n overview.yaxis.set_major_locator(NullLocator())\n overview.imshow(\n detail.array, interpolation='nearest', aspect='auto',\n origin='lower'\n )\n rect = UpdatingRect(\n [0,0], 0, 0, facecolor='black', edgecolor='cyan', alpha=0.5\n )\n overview.zoomrect = rect\n rect.target = detail\n detail.callbacks.connect('xlim_changed', rect)\n detail.callbacks.connect('ylim_changed', rect)\n overview.add_patch(rect)\n rect(overview)\n self.toggle_overview(False)\n xoff = self.detail.xoffset()\n self.detail.set_position([0, xoff, 0.3, 1.0-xoff])\n p.set_position([0.3, xoff, 0.7, 1.0-xoff])\n\n\nclass MultiTreeFigure(object):\n \"\"\"\n Window for showing multiple trees side-by-side.\n\n TODO: document this\n \"\"\"\n def __init__(self, trees=None, name=None, support=70,\n scaled=True, branchlabels=False, radial=False):\n \"\"\"\n *trees* are assumed to be objects suitable for passing to\n ivy.tree.read()\n \"\"\"\n self.root = []\n self.name = name\n self.name2plot = {}\n self.plot = []\n self.scaled = scaled\n self.branchlabels = branchlabels\n self.radial = radial\n self.highlighted = set()\n self.divs = []\n pars = SubplotParams(\n left=0, right=1, bottom=0.05, top=1, wspace=0.04\n )\n fig = pyplot.figure(subplotpars=pars)\n connect_events(fig.canvas)\n self.figure = fig\n\n for x in 
trees or []:\n self.add(x, support=support, scaled=scaled,\n branchlabels=branchlabels)\n\n def on_nodes_selected(self, treeplot):\n pass\n\n def clear(self):\n self.root = []\n self.name2plot = {}\n self.highlighted = set()\n self.divs = []\n self.figure.clf()\n\n def picked(self, e):\n try:\n if e.mouseevent.button==1:\n print e.artist.get_text()\n sys.stdout.flush()\n except:\n pass\n\n def getplot(self, x):\n p = None\n try:\n i = self.root.index(x)\n return self.plot[i]\n except ValueError:\n return self.name2plot.get(x)\n\n def add(self, data, name=None, support=70, scaled=True,\n branchlabels=False, leaflabels=True, mark_named=True):\n root = None\n if isinstance(data, tree.Node):\n root = data\n else:\n root = tree.read(data)\n if not root:\n raise IOError, \"cannot coerce data into tree.Node\"\n\n name = name or root.treename\n self.root.append(root)\n\n fig = self.figure\n N = len(self.plot)+1\n for i, p in enumerate(self.plot):\n p.change_geometry(1, N, i+1)\n plt = TreePlot(fig, 1, N, N, app=self, name=name, support=support,\n scaled=scaled, branchlabels=branchlabels,\n leaflabels=leaflabels, mark_named=mark_named)\n p = fig.add_subplot(plt)\n p.set_root(root)\n p.plot_tree()\n p.index = N-1\n self.plot.append(p)\n if name:\n assert name not in self.name2plot\n self.name2plot[name] = p\n\n ## global IP\n ## if IP:\n ## def f(shell, s):\n ## self.highlight(s)\n ## return sorted([ x.label for x in self.highlighted ])\n ## IP.expose_magic(\"highlight\", f)\n ## def f(shell, s):\n ## self.root.ladderize()\n ## self.redraw()\n ## IP.expose_magic(\"ladderize\", f)\n ## def f(shell, s):\n ## self.show()\n ## IP.expose_magic(\"show\", f)\n ## def f(shell, s):\n ## self.redraw()\n ## IP.expose_magic(\"redraw\", f)\n return p\n\n def show(self):\n self.figure.show()\n\n def redraw(self):\n for p in self.plot:\n p.redraw()\n self.figure.canvas.draw_idle()\n\n def ladderize(self, reverse=False):\n for n in self.root:\n n.ladderize(reverse)\n self.redraw()\n\n def 
highlight(self, s=None, add=False, width=5, color=\"red\"):\n \"\"\"\n Highlight nodes\n\n Args:\n s: Str or list of Strs or Node or list of Nodes\n add (bool): Whether to add to existing highlighted nodes or\n overwrite them.\n width (float): Width of highlighted lines. Defaults to 5\n color (str): Color of highlighted lines. Defaults to red\n \"\"\"\n if not s:\n self.highlighted = set()\n if not add:\n self.highlighted = set()\n\n nodesets = [ p.root.findall(s) for p in self.plot ]\n\n for nodes, p in zip(nodesets, self.plot):\n if nodes:\n p.highlight(nodes, width=width, color=color)\n else:\n p.highlight()\n\n self.highlighted = nodesets\n self.figure.canvas.draw_idle()\n\n ## for root in self.root:\n ## for node in root.iternodes():\n ## if node.label and (s in node.label):\n ## self.highlighted.add(node)\n ## self.highlight()\n\n def home(self):\n for p in self.plot: p.home()\n\n\ndef connect_events(canvas):\n mpl_connect = canvas.mpl_connect\n mpl_connect(\"button_press_event\", onclick)\n mpl_connect(\"button_release_event\", onbuttonrelease)\n mpl_connect(\"scroll_event\", onscroll)\n mpl_connect(\"pick_event\", onpick)\n mpl_connect(\"motion_notify_event\", ondrag)\n mpl_connect(\"key_press_event\", onkeypress)\n mpl_connect(\"axes_enter_event\", axes_enter)\n mpl_connect(\"axes_leave_event\", axes_leave)\n\nclass UpdatingRect(Rectangle):\n def __call__(self, p):\n self.set_bounds(*p.viewLim.bounds)\n p.figure.canvas.draw_idle()\n\nclass Tree(Axes):\n \"\"\"\n matplotlib.axes.Axes subclass for rendering trees.\n \"\"\"\n def __init__(self, fig, rect, *args, **kwargs):\n self.root = None\n self.app = kwargs.pop(\"app\", None)\n self.support = kwargs.pop(\"support\", 70.0)\n self.scaled = kwargs.pop(\"scaled\", True)\n self.leaflabels = kwargs.pop(\"leaflabels\", True)\n self.branchlabels = kwargs.pop(\"branchlabels\", True)\n self._mark_named = kwargs.pop(\"mark_named\", True)\n self.name = None\n self.leaf_fontsize = kwargs.pop(\"leaf_fontsize\", 10)\n 
self.branch_fontsize = kwargs.pop(\"branch_fontsize\", 10)\n self.branch_width = kwargs.pop(\"branch_width\", 1)\n self.branch_color = kwargs.pop(\"branch_color\", \"black\")\n self.interactive = kwargs.pop(\"interactive\", True)\n self.decorators = kwargs.pop(\"decorators\", [])\n ## if self.decorators:\n ## print >> sys.stderr, \"got %s decorators\" % len(self.decorators)\n self.xoff = kwargs.pop(\"xoff\", 0)\n self.yoff = kwargs.pop(\"yoff\", 0)\n self.highlight_support = kwargs.pop(\"highlight_support\", True)\n self.smooth_xpos = kwargs.pop(\"smooth_xpos\", 0)\n Axes.__init__(self, fig, rect, *args, **kwargs)\n self.nleaves = 0\n self.highlighted = None\n self.highlightpatch = None\n self.pan_start = None\n if not self.decorators:\n self.decorators = [\n (\"__selected_nodes__\", (Tree.highlight_selected_nodes, [], {}))\n ]\n self.name2dec = dict([ (x[0], i) for i, x in\n enumerate(self.decorators) ])\n self._active = False\n\n if self.interactive:\n self.callbacks.connect(\"ylim_changed\", self.draw_labels)\n self.selector = RectangleSelector(self, self.rectselect,\n useblit=True)\n def f(e):\n if e.button != 1: return True\n else: return RectangleSelector.ignore(self.selector, e)\n self.selector.ignore = f\n self.xoffset_value = 0.05\n self.selected_nodes = set()\n self.leaf_offset = 4\n self.leaf_valign = \"center\"\n self.leaf_halign = \"left\"\n self.branch_offset = -5\n self.branch_valign = \"center\"\n self.branch_halign = \"right\"\n\n self.spines[\"top\"].set_visible(False)\n self.spines[\"left\"].set_visible(False)\n self.spines[\"right\"].set_visible(False)\n self.xaxis.set_ticks_position(\"bottom\")\n\n def p2y(self):\n \"Convert a single display point to y-units\"\n transform = self.transData.inverted().transform\n return transform([0,1])[1] - transform([0,0])[1]\n\n def p2x(self):\n \"Convert a single display point to y-units\"\n transform = self.transData.inverted().transform\n return transform([0,0])[1] - transform([1,0])[1]\n\n def 
decorate(self, func, *args, **kwargs):\n \"\"\"\n Decorate the tree with function *func*. If *kwargs* contains\n the key-value pair ('store', *name*), the decorator function\n is stored in self.decorators and called upon every redraw.\n \"\"\"\n name = kwargs.pop(\"store\", None)\n if name:\n if name in self.name2dec:\n i = self.name2dec[name]\n self.decorators[i] = (name, (func, args, kwargs))\n else:\n self.decorators.append((name, (func, args, kwargs)))\n self.name2dec = dict([ (x[0], i) for i, x in\n enumerate(self.decorators) ])\n\n func(self, *args, **kwargs)\n\n def rmdec(self, name):\n if name in self.name2dec:\n i = self.name2dec[name]\n del self.decorators[i]\n self.name2dec = dict([ (x[0], i) for i, x in\n enumerate(self.decorators) ])\n\n\n def flip(self):\n \"\"\"\n Reverse the direction of the x-axis.\n \"\"\"\n self.leaf_offset *= -1\n self.branch_offset *= -1\n ha = self.leaf_halign\n self.leaf_halign = \"right\" if ha == \"left\" else \"left\"\n ha = self.branch_halign\n self.branch_halign = \"right\" if ha == \"left\" else \"left\"\n self.invert_xaxis()\n self.redraw()\n\n def xoffset(self):\n \"\"\"Space below x axis to show tick labels.\"\"\"\n if self.scaled:\n return self.xoffset_value\n else:\n return 0\n\n def save_newick(self, filename):\n \"\"\"\n Save tree as a newick file.\n\n Args:\n filename (str): Path to file.\n\n \"\"\"\n if os.path.exists(filename):\n s = raw_input(\"File %s exists, enter 'y' to overwrite \").strip()\n if (s and s.lower() != 'y') or (not s):\n return\n import newick\n f = file(filename, \"w\")\n f.write(newick.string(self.root))\n f.close()\n\n def set_scaled(self, scaled):\n flag = self.scaled != scaled\n self.scaled = scaled\n return flag\n\n def cbar(self, nodes, color=None, label=None, x=None, width=8, xoff=10,\n showlabel=True, mrca=True):\n \"\"\"\n Draw a 'clade' bar (i.e., along the y-axis) indicating a\n clade. *nodes* are assumed to be one or more nodes in the\n tree. 
If just one, it should be the internal node\n representing the clade of interest; otherwise, the clade of\n interest is the most recent common ancestor of the specified\n nodes. *label* is an optional string to be drawn next to the\n bar, *offset* by the specified number of display units. If\n *label* is ``None`` then the clade's label is used instead.\n\n Args:\n nodes: Node or list of nodes\n color (str): Color of the bar. Optional, defaults to None.\n label (str): Optional label for bar. If None, the clade's\n label is used instead. Defaults to None.\n width (float): Width of bar\n xoff (float): Offset from label to bar\n showlabel (bool): Whether or not to draw the label\n mrca: RR: Not quite sure what this does -CZ\n\n \"\"\"\n xlim = self.get_xlim(); ylim = self.get_ylim()\n if color is None: color = _tango.next()\n transform = self.transData.inverted().transform\n\n if mrca:\n if isinstance(nodes, tree.Node):\n spec = nodes\n elif type(nodes) in types.StringTypes:\n spec = self.root.get(nodes)\n else:\n spec = self.root.mrca(nodes)\n\n assert spec in self.root\n label = label or spec.label\n leaves = spec.leaves()\n\n else:\n leaves = nodes\n\n n2c = self.n2c\n\n y = sorted([ n2c[n].y for n in leaves ])\n ymin = y[0]; ymax = y[-1]; y = (ymax+ymin)*0.5\n\n if x is None:\n x = max([ n2c[n].x for n in leaves ])\n _x = 0\n for lf in leaves:\n txt = self.node2label.get(lf)\n if txt and txt.get_visible():\n _x = max(_x, transform(txt.get_window_extent())[1,0])\n if _x > x: x = _x\n\n v = sorted(list(transform(((0,0),(xoff,0)))[:,0]))\n xoff = v[1]-v[0]\n x += xoff\n\n Axes.plot(self, [x,x], [ymin, ymax], '-', linewidth=width, color=color)\n\n if showlabel and label:\n xo = self.leaf_offset\n if xo > 0:\n xo += width*0.5\n else:\n xo -= width*0.5\n txt = self.annotate(\n label,\n xy=(x, y),\n xytext=(xo, 0),\n textcoords=\"offset points\",\n verticalalignment=self.leaf_valign,\n horizontalalignment=self.leaf_halign,\n fontsize=self.leaf_fontsize,\n clip_on=True,\n 
picker=False\n )\n\n self.set_xlim(xlim); self.set_ylim(ylim)\n\n def anctrace(self, anc, descendants=None, width=4, color=\"blue\"):\n \"\"\"\n RR: This function gives me a 'list index out of range' error\n when I try to use it -CZ\n \"\"\"\n if not descendants:\n descendants = anc.leaves()\n else:\n for d in descendants:\n assert d in anc\n\n nodes = []\n for d in descendants:\n v = d.rootpath(anc)\n if v:\n nodes.extend(v)\n nodes = set(nodes)\n nodes.remove(anc)\n self.trace_branches(nodes, width, color)\n\n def trace_branches(self, nodes, width=4, color=\"blue\"):\n n2c = self.n2c\n M = Path.MOVETO; L = Path.LINETO\n verts = []\n codes = []\n for c, pc in [ (n2c[x], n2c[x.parent]) for x in nodes\n if (x in n2c) and x.parent ]:\n x = c.x; y = c.y\n px = pc.x; py = pc.y\n verts.append((x, y)); codes.append(M)\n verts.append((px, y)); codes.append(L)\n verts.append((px, py)); codes.append(L)\n px, py = verts[-1]\n verts.append((px, py)); codes.append(M)\n\n p = PathPatch(Path(verts, codes), fill=False,\n linewidth=width, edgecolor=color)\n self.add_patch(p)\n self.figure.canvas.draw_idle()\n return p\n\n def highlight_selected_nodes(self, color=\"green\"):\n xlim = self.get_xlim()\n ylim = self.get_ylim()\n get = self.n2c.get\n coords = filter(None, [ get(n) for n in self.selected_nodes ])\n x = [ c.x for c in coords ]\n y = [ c.y for c in coords ]\n if x and y:\n self.__selected_highlight_patch = self.scatter(x, y, s=60, c=color,\n zorder=100)\n self.set_xlim(xlim)\n self.set_ylim(ylim)\n self.figure.canvas.draw_idle()\n\n def select_nodes(self, nodes=None, add=False):\n try:\n self.__selected_highlight_patch.remove()\n self.figure.canvas.draw_idle()\n except:\n pass\n if add:\n if nodes:\n self.selected_nodes = self.selected_nodes | nodes\n if hasattr(self, \"app\") and self.app:\n self.app.on_nodes_selected(self)\n self.highlight_selected_nodes()\n else:\n if nodes:\n self.selected_nodes = nodes\n if hasattr(self, \"app\") and self.app:\n 
self.app.on_nodes_selected(self)\n self.highlight_selected_nodes()\n else:\n self.selected_nodes = set()\n\n def rectselect(self, e0, e1):\n xlim = self.get_xlim()\n ylim = self.get_ylim()\n s = set()\n x0, x1 = sorted((e0.xdata, e1.xdata))\n y0, y1 = sorted((e0.ydata, e1.ydata))\n add = e0.key == 'shift'\n for n, c in self.n2c.items():\n if (x0 < c.x < x1) and (y0 < c.y < y1):\n s.add(n)\n self.select_nodes(nodes = s, add = add)\n self.set_xlim(xlim)\n self.set_ylim(ylim)\n ## if s:\n ## print \"Selected:\"\n ## for n in s:\n ## print \" \", n\n\n def picked(self, e):\n if hasattr(self, \"app\") and self.app:\n self.app.picked(e)\n\n def window2data(self, expandx=1.0, expandy=1.0):\n \"\"\"\n return the data coordinates ((x0, y0),(x1, y1)) of the plot\n window, expanded by relative units of window size\n \"\"\"\n bb = self.get_window_extent()\n bbx = bb.expanded(expandx, expandy)\n return self.transData.inverted().transform(bbx.get_points())\n\n def get_visible_nodes(self, labeled_only=False):\n ## transform = self.transData.inverted().transform\n ## bb = self.get_window_extent()\n ## bbx = bb.expanded(1.1,1.1)\n ## ((x0, y0),(x1, y1)) = transform(bbx.get_points())\n ((x0, y0),(x1, y1)) = self.window2data(1.1, 1.1)\n #print \"visible_nodes points\", x0, x1, y0, y1\n\n if labeled_only:\n def f(v): return (y0 < v[0] < y1) and (v[2] in self.node2label)\n else:\n def f(v): return (y0 < v[0] < y1)\n for y, x, n in filter(f, self.coords):\n yield (n, x, y)\n\n def zoom_cxy(self, x=0.1, y=0.1, cx=None, cy=None):\n \"\"\"\n Zoom the x and y axes in by the specified proportion of the\n current view, with a fixed data point (cx, cy)\n \"\"\"\n transform = self.transData.inverted().transform\n xlim = self.get_xlim(); xmid = sum(xlim)*0.5\n ylim = self.get_ylim(); ymid = sum(ylim)*0.5\n bb = self.get_window_extent()\n bbx = bb.expanded(1.0-x,1.0-y)\n points = transform(bbx.get_points())\n x0, x1 = points[:,0]; y0, y1 = points[:,1]\n deltax = xmid-x0; deltay = ymid-y0\n cx = 
cx or xmid; cy = cy or ymid\n xoff = (cx-xmid)*x\n self.set_xlim(xmid-deltax+xoff, xmid+deltax+xoff)\n yoff = (cy-ymid)*y\n self.set_ylim(ymid-deltay+yoff, ymid+deltay+yoff)\n self.adjust_xspine()\n\n def zoom(self, x=0.1, y=0.1, cx=None, cy=None):\n \"\"\"\n Zoom the x and y axes in by the specified proportion of the\n current view.\n \"\"\"\n # get the function to convert display coordinates to data\n # coordinates\n transform = self.transData.inverted().transform\n xlim = self.get_xlim()\n ylim = self.get_ylim()\n bb = self.get_window_extent()\n bbx = bb.expanded(1.0-x,1.0-y)\n points = transform(bbx.get_points())\n x0, x1 = points[:,0]; y0, y1 = points[:,1]\n deltax = x0 - xlim[0]; deltay = y0 - ylim[0]\n self.set_xlim(xlim[0]+deltax, xlim[1]-deltax)\n self.set_ylim(ylim[0]+deltay, ylim[1]-deltay)\n self.adjust_xspine()\n\n def center_y(self, y):\n \"\"\"\n Center the y-axis of the canvas on the given y value\n \"\"\"\n ymin, ymax = self.get_ylim()\n yoff = (ymax - ymin) * 0.5\n self.set_ylim(y-yoff, y+yoff)\n self.adjust_xspine()\n\n def center_x(self, x, offset=0.3):\n \"\"\"\n Center the x-axis of the canvas on the given x value\n \"\"\"\n xmin, xmax = self.get_xlim()\n xspan = xmax - xmin\n xoff = xspan*0.5 + xspan*offset\n self.set_xlim(x-xoff, x+xoff)\n self.adjust_xspine()\n\n def center_node(self, node):\n \"\"\"\n Center the canvas on the given node\n \"\"\"\n c = self.n2c[node]\n y = c.y\n self.center_y(y)\n x = c.x\n self.center_x(x, 0.2)\n\n def do_highlight_support(self):\n \"\"\"\n TODO: reconfigure this, insert into self.decorators\n \"\"\"\n if self.support:\n lim = float(self.support)\n\n M = Path.MOVETO; L = Path.LINETO\n\n verts = []; codes = []\n segments = []\n def f(n):\n if n.isleaf or not n.parent: return False\n try: return float(n.label) >= lim\n except:\n try: return float(n.support) >= lim\n except: pass\n return False\n\n for node, coords in [ x for x in self.n2c.items() if f(x[0]) ]:\n x = coords.x; y = coords.y\n p = node.parent\n 
pcoords = self.n2c[p]\n px = pcoords.x; py = y\n if self.app and self.app.radial:\n pc = self.n2c[node.parent]; theta2 = pc.angle\n px = math.cos(math.radians(coords.angle))*pc.depth\n py = math.sin(math.radians(coords.angle))*pc.depth\n\n ## segments.append([(x, y),(px, y)])\n verts.append((x,y)); codes.append(M)\n verts.append((px,py)); codes.append(L)\n\n if verts:\n patch = PathPatch(Path(verts, codes), fill=False,\n linewidth=3, edgecolor='black')\n self.add_patch(patch)\n\n ## self.add_artist(Line2D(\n ## [x,px], [y,py], lw=3, solid_capstyle=\"butt\", color=\"black\"\n ## ))\n\n def hl(self, s):\n nodes = self.root.findall(s)\n if nodes:\n self.highlight(nodes)\n\n def hlines(self, nodes, width=5, color=\"red\", xoff=0, yoff=0):\n offset = IdentityTransform()\n segs = []; w = []; o = []\n for n in filter(lambda x:x.parent, nodes):\n c = self.n2c[n]; p = self.n2c[n.parent]\n segs.append(((p.x,c.y),(c.x,c.y)))\n w.append(width); o.append((xoff,yoff))\n lc = LineCollection(segs, linewidths=w, transOffset=offset, offsets=o)\n lc.set_color(color)\n Axes.add_collection(self, lc)\n ## self.drawstack.append((\"hlines\", [nodes], dict(width=width,\n ## color=color,\n ## xoff=xoff,\n ## yoff=yoff)))\n self.figure.canvas.draw_idle()\n return lc\n\n def hardcopy(self, relwidth=0.5, leafpad=1.5):\n p = HC.TreeFigure(self.root, relwidth=relwidth, leafpad=leafpad,\n name=self.name, support=self.support,\n leaf_fontsize=self.leaf_fontsize,\n branch_fontsize=self.branch_fontsize,\n branch_width=self.branch_width,\n branch_color=self.branch_color,\n highlight_support=self.highlight_support,\n branchlabels=self.branchlabels,\n decorators=self.decorators,\n leaflabels=self.leaflabels,\n mark_named=self._mark_named,\n xlim=self.get_xlim(),\n ylim=self.get_ylim())\n return p\n\n def highlight(self, nodes=None, width=5, color=\"red\"):\n if self.highlightpatch:\n try:\n self.highlightpatch.remove()\n except:\n pass\n if not nodes:\n return\n\n if len(nodes)>1:\n mrca = 
self.root.mrca(nodes)\n if not mrca:\n return\n else:\n mrca = list(nodes)[0]\n\n M = Path.MOVETO; L = Path.LINETO\n verts = []\n codes = []\n seen = set()\n for node, coords in [ x for x in self.n2c.items() if x[0] in nodes ]:\n x = coords.x; y = coords.y\n p = node.parent\n while p:\n pcoords = self.n2c[p]\n px = pcoords.x; py = pcoords.y\n if node not in seen:\n verts.append((x, y)); codes.append(M)\n verts.append((px, y)); codes.append(L)\n verts.append((px, py)); codes.append(L)\n seen.add(node)\n if p == mrca or node == mrca:\n break\n node = p\n coords = self.n2c[node]\n x = coords.x; y = coords.y\n p = node.parent\n px, py = verts[-1]\n verts.append((px, py)); codes.append(M)\n\n self.highlightpath = Path(verts, codes)\n self.highlightpatch = PathPatch(\n self.highlightpath, fill=False, linewidth=width, edgecolor=color,\n capstyle='round', joinstyle='round'\n )\n return self.add_patch(self.highlightpatch)\n\n def find(self, s):\n \"\"\"\n Find node(s) matching pattern s and zoom to node(s)\n \"\"\"\n nodes = list(self.root.find(s))\n if nodes:\n self.zoom_nodes(nodes)\n\n def zoom_nodes(self, nodes, border=1.2):\n y0, y1 = self.get_ylim(); x0, x1 = self.get_xlim()\n y0 = max(0, y0); y1 = min(1, y1)\n\n n2c = self.n2c\n v = [ n2c[n] for n in nodes ]\n ymin = min([ c.y for c in v ])\n ymax = max([ c.y for c in v ])\n xmin = min([ c.x for c in v ])\n xmax = max([ c.x for c in v ])\n bb = Bbox(((xmin,ymin), (xmax, ymax)))\n\n # convert data coordinates to display coordinates\n transform = self.transData.transform\n disp_bb = [Bbox(transform(bb))]\n for n in nodes:\n if n.isleaf:\n txt = self.node2label[n]\n if txt.get_visible():\n disp_bb.append(txt.get_window_extent())\n\n disp_bb = Bbox.union(disp_bb).expanded(border, border)\n\n # convert back to data coordinates\n points = self.transData.inverted().transform(disp_bb)\n x0, x1 = points[:,0]\n y0, y1 = points[:,1]\n self.set_xlim(x0, x1)\n self.set_ylim(y0, y1)\n\n def zoom_clade(self, anc, border=1.2):\n if 
anc.isleaf:\n self.center_node(anc)\n\n else:\n self.zoom_nodes(list(anc), border)\n\n def draw_leaf_labels(self, *args):\n leaves = list(filter(lambda x:x[0].isleaf,\n self.get_visible_nodes(labeled_only=True)))\n psep = self.leaf_pixelsep()\n fontsize = min(self.leaf_fontsize, max(psep, 8))\n n2l = self.node2label\n transform = self.transData.transform\n sub = operator.sub\n\n for n in leaves:\n n2l[n[0]].set_visible(False)\n\n # draw leaves\n leaves_drawn = []\n for n, x, y in leaves:\n txt = self.node2label[n]\n if not leaves_drawn:\n txt.set_visible(True)\n leaves_drawn.append(txt)\n self.figure.canvas.draw_idle()\n continue\n\n txt2 = leaves_drawn[-1]\n y0 = y; y1 = txt2.xy[1]\n sep = sub(*transform(([0,y0],[0,y1]))[:,1])\n if sep > fontsize:\n txt.set_visible(True)\n txt.set_size(fontsize)\n leaves_drawn.append(txt)\n self.figure.canvas.draw_idle()\n\n if leaves_drawn:\n leaves_drawn[0].set_size(fontsize)\n\n return fontsize\n\n def draw_labels(self, *args):\n fs = max(10, self.draw_leaf_labels())\n nodes = self.get_visible_nodes(labeled_only=True)\n ## print [ x[0].id for x in nodes ]\n branches = list(filter(lambda x:(not x[0].isleaf), nodes))\n n2l = self.node2label\n for n, x, y in branches:\n t = n2l[n]\n t.set_visible(True)\n t.set_size(fs)\n\n def unclutter(self, *args):\n nodes = self.get_visible_nodes(labeled_only=True)\n branches = list(filter(lambda x:(not x[0].isleaf), nodes))\n psep = self.leaf_pixelsep()\n n2l = self.node2label\n fontsize = min(self.leaf_fontsize*1.2, max(psep, self.leaf_fontsize))\n\n drawn = []\n for n, x, y in branches:\n txt = n2l[n]\n try:\n bb = txt.get_window_extent().expanded(2, 2)\n vis = True\n for n2 in reversed(drawn):\n txt2 = n2l[n2]\n if bb.overlaps(txt2.get_window_extent()):\n txt.set_visible(False)\n vis = False\n self.figure.canvas.draw_idle()\n break\n if vis:\n txt.set_visible(True)\n txt.set_size(fontsize)\n self.figure.canvas.draw_idle()\n drawn.append(n)\n except RuntimeError:\n pass\n ## 
txt.set_visible(True)\n ## txt.set_size(fontsize)\n ## drawn.append(n)\n ## self.figure.canvas.draw_idle()\n\n def leaf_pixelsep(self):\n y0, y1 = self.get_ylim()\n y0 = max(0, y0)\n y1 = min(self.nleaves, y1)\n display_points = self.transData.transform(((0, y0), (0, y1)))\n # height in pixels (visible y data extent)\n height = operator.sub(*reversed(display_points[:,1]))\n pixelsep = height/((y1-y0)/self.leaf_hsep)\n return pixelsep\n\n def ypp(self):\n y0, y1 = self.get_ylim()\n p0, p1 = self.transData.transform(((0, y0), (0, y1)))[:,1]\n return (y1-y0)/float(p1-p0)\n\n def draw_labels_old(self, *args):\n if self.nleaves:\n y0, y1 = self.get_ylim()\n y0 = max(0, y0); y1 = min(1, y1)\n\n display_points = self.transData.transform(((0, y0), (0, y1)))\n # height in pixels (visible y data extent)\n height = operator.sub(*reversed(display_points[:,1]))\n pixelsep = height/((y1-y0)/self.leaf_hsep)\n fontsize = min(max(pixelsep-2, 8), 12)\n\n if pixelsep >= 8:\n for node, txt in self.node2label.items():\n if node.isleaf:\n if self.leaflabels:\n c = self.n2c[node]\n x = c.x; y = c.y\n if (y0 < y < y1):\n txt.set_size(fontsize)\n txt.set_visible(True)\n else:\n if self.branchlabels:\n c = self.n2c[node]\n x = c.x; y = c.y\n if (y0 < y < y1):\n txt.set_size(fontsize)\n txt.set_visible(True)\n elif pixelsep >= 4:\n for node, txt in self.node2label.items():\n if node.isleaf:\n txt.set_visible(False)\n else:\n if self.branchlabels:\n c = self.n2c[node]\n x = c.x; y = c.y\n if (y0 < y < y1):\n txt.set_size(fontsize)\n txt.set_visible(True)\n else:\n for node, txt in self.node2label.items():\n txt.set_visible(False)\n self.figure.canvas.draw_idle()\n\n def redraw(self, home=False, layout=True):\n \"\"\"\n Replot the tree\n \"\"\"\n xlim = self.get_xlim()\n ylim = self.get_ylim()\n self.cla()\n if layout:\n self.layout()\n self.plot_tree()\n if self.interactive:\n self.callbacks.connect(\"ylim_changed\", self.draw_labels)\n\n if home:\n self.home()\n else:\n 
self.set_xlim(*xlim)\n self.set_ylim(*ylim)\n\n def set_name(self, name):\n self.name = name\n if name:\n at = AnchoredText(\n self.name, loc=2, frameon=True,\n prop=dict(size=12, weight=\"bold\")\n )\n at.patch.set_linewidth(0)\n at.patch.set_facecolor(\"white\")\n at.patch.set_alpha(0.6)\n self.add_artist(at)\n return at\n\n def _path_to_parent(self, node):\n \"\"\"\n For use in drawing branches\n \"\"\"\n c = self.n2c[node]; x = c.x; y = c.y\n pc = self.n2c[node.parent]; px = pc.x; py = pc.y\n M = Path.MOVETO; L = Path.LINETO\n verts = [(x, y), (px, y), (px, py)]\n codes = [M, L, L]\n return verts, codes\n ## return [PathPatch(Path(verts, codes), fill=False,\n ## linewidth=width or self.branch_width,\n ## edgecolor=color or self.branch_color)]\n\n\n def layout(self):\n self.n2c = cartesian(self.root, scaled=self.scaled, yunit=1.0,\n smooth=self.smooth_xpos)\n for c in self.n2c.values():\n c.x += self.xoff; c.y += self.yoff\n sv = sorted([\n [c.y, c.x, n] for n, c in self.n2c.items()\n ])\n self.coords = sv#numpy.array(sv)\n ## n2c = self.n2c\n ## self.node2linesegs = {}\n ## for node, coords in n2c.items():\n ## x = coords.x; y = coords.y\n ## v = [(x,y)]\n ## if node.parent:\n ## pcoords = n2c[node.parent]\n ## px = pcoords.x; py = pcoords.y\n ## v.append((px,y))\n ## v.append((px,py))\n ## self.node2linesegs[node] = v\n\n def set_root(self, root):\n self.root = root\n self.leaves = root.leaves()\n self.nleaves = len(self.leaves)\n self.leaf_hsep = 1.0#/float(self.nleaves)\n\n for n in root.descendants():\n if n.length is None:\n self.scaled=False; break\n self.layout()\n\n def plot_tree(self, root=None, **kwargs):\n \"\"\"\n Draw branches and labels\n \"\"\"\n if root and not self.root:\n self.set_root(root)\n\n if self.interactive: pyplot.ioff()\n\n if \"branchlabels\" in kwargs:\n self.branchlabels = kwargs[\"branchlabels\"]\n if \"leaflabels\" in kwargs:\n self.leaflabels = kwargs[\"leaflabels\"]\n self.yaxis.set_visible(False)\n 
self.create_branch_artists()\n self.create_label_artists()\n if self.highlight_support:\n self.do_highlight_support()\n self.mark_named()\n ## self.home()\n\n for k, v in self.decorators:\n func, args, kwargs = v\n func(self, *args, **kwargs)\n\n self.set_name(self.name)\n self.adjust_xspine()\n\n if self.interactive: pyplot.ion()\n\n labels = [ x.label for x in self.root.leaves() ]\n def fmt(x, pos=None):\n if x<0: return \"\"\n try: return labels[int(round(x))]\n except: pass\n return \"\"\n #self.yaxis.set_major_formatter(FuncFormatter(fmt))\n\n return self\n\n def clade_dimensions(self):\n n2c = self.n2c\n d = {}\n def recurse(n, n2c, d):\n v = []\n for c in n.children:\n recurse(c, n2c, d)\n if c.isleaf:\n x, y = n2c[c].point()\n x0 = x1 = x; y0 = y1 = y\n else:\n x0, x1, y0, y1 = d[c]\n v.append((x0, x1, y0, y1))\n if v:\n x0 = n2c[n].x\n x1 = max([ x[1] for x in v ])\n y0 = min([ x[2] for x in v ])\n y1 = max([ x[3] for x in v ])\n d[n] = (x0, x1, y0, y1)\n recurse(self.root, n2c, d)\n return d\n\n def clade_height_pixels(self):\n ypp = self.ypp()\n d = self.clade_dimensions()\n h = {}\n for n, (x0, x1, y0, y1) in d.items():\n h[n] = (y1-y0)/ypp\n return h\n\n def _decimate_nodes(self, n=500):\n leaves = self.leaves\n nleaves = len(leaves)\n if nleaves > n:\n indices = numpy.linspace(0, nleaves-1, n).astype(int)\n leaves = [ leaves[i] for i in indices ]\n return set(list(chain.from_iterable([ list(x.rootpath())\n for x in leaves ])))\n else:\n return self.root\n\n def create_branch_artists(self):\n \"\"\"\n Use MPL Paths to draw branches\n \"\"\"\n ## patches = []\n verts = []; codes = []\n for node in self.root.descendants():\n v, c = self._path_to_parent(node)\n verts.extend(v); codes.extend(c)\n self.branchpatch = PathPatch(\n Path(verts, codes), fill=False,\n linewidth=self.branch_width,\n edgecolor=self.branch_color\n )\n self.add_patch(self.branchpatch)\n ## for node in self._decimate_nodes():\n ## if node.parent:\n ## for p in 
self._path_to_parent(node):\n ## patches.append(p)\n ## self.branch_patches = PatchCollection(patches, match_original=True)\n ## self.add_collection(self.branch_patches)\n\n ## print \"enter: create_branch_artists\"\n ## self.node2branch = {}\n ## for node, segs in self.node2linesegs.items():\n ## line = Line2D(\n ## [x[0] for x in segs], [x[1] for x in segs],\n ## lw=self.branch_width, color=self.branch_color\n ## )\n ## line.set_visible(False)\n ## Axes.add_artist(self, line)\n ## self.node2branch[node] = line\n\n ## d = self.node2linesegs\n ## segs = [ d[n] for n in self.root if (n in d) ]\n\n ## dims = self.clade_dimensions(); ypp = self.ypp()\n ## def recurse(n, dims, clades, terminals):\n ## stop = False\n ## h = None\n ## v = dims.get(n)\n ## if v: h = (v[3]-v[2])/ypp\n ## if (h and (h < 20)) or (not h):\n ## stop = True\n ## terminals.append(n)\n ## if not stop:\n ## clades.append(n)\n ## for c in n.children:\n ## recurse(c, dims, clades, terminals)\n ## clades = []; terminals = []\n ## recurse(self.root, dims, clades, terminals)\n ## segs = [ d[n] for n in self.root if (n in d) and (n in clades) ]\n ## for t in terminals:\n ## if t.isleaf:\n ## segs.append(d[t])\n ## else:\n ## x0, x1, y0, y1 = dims[t]\n ## x, y = self.n2c[t].point()\n ## px, py = self.n2c[t.parent].point()\n ## segs.append(((px,py), (px,y), (x,y), (x1, y0), (x1,y1), (x,y)))\n\n ## lc = LineCollection(segs, linewidths=self.branch_width,\n ## colors = self.branch_color)\n ## self.branches_linecollection = Axes.add_collection(self, lc)\n ## print \"leave: create_branch_artists\"\n\n def create_label_artists(self):\n ## print \"enter: create_label_artists\"\n self.node2label = {}\n n2c = self.n2c\n for node, coords in n2c.items():\n x = coords.x; y = coords.y\n if node.isleaf and node.label and self.leaflabels:\n txt = self.annotate(\n node.label,\n xy=(x, y),\n xytext=(self.leaf_offset, 0),\n textcoords=\"offset points\",\n verticalalignment=self.leaf_valign,\n 
horizontalalignment=self.leaf_halign,\n fontsize=self.leaf_fontsize,\n clip_on=True,\n picker=True\n )\n txt.node = node\n txt.set_visible(False)\n self.node2label[node] = txt\n\n if (not node.isleaf) and node.label and self.branchlabels:\n txt = self.annotate(\n node.label,\n xy=(x, y),\n xytext=(self.branch_offset,0),\n textcoords=\"offset points\",\n verticalalignment=self.branch_valign,\n horizontalalignment=self.branch_halign,\n fontsize=self.branch_fontsize,\n bbox=dict(fc=\"lightyellow\", ec=\"none\", alpha=0.8),\n clip_on=True,\n picker=True\n )\n ## txt.set_visible(False)\n txt.node = node\n self.node2label[node] = txt\n ## print \"leave: create_label_artists\"\n\n def adjust_xspine(self):\n v = sorted([ c.x for c in self.n2c.values() ])\n try:\n self.spines[\"bottom\"].set_bounds(v[0],v[-1])\n except AttributeError:\n pass\n for t,n,s in self.xaxis.iter_ticks():\n if (n > v[-1]) or (n < v[0]):\n t.set_visible(False)\n\n def mark_named(self):\n if self._mark_named:\n n2c = self.n2c\n cv = [ c for n, c in n2c.items() if n.label and (not n.isleaf) ]\n x = [ c.x for c in cv ]\n y = [ c.y for c in cv ]\n if x and y:\n self.scatter(x, y, s=5, color='black')\n\n def home(self):\n td = self.transData\n trans = td.inverted().transform\n xmax = xmin = ymax = ymin = 0\n if self.node2label:\n try:\n v = [ x.get_window_extent() for x in self.node2label.values()\n if x.get_visible() ]\n if v:\n xmax = trans((max([ x.xmax for x in v ]),0))[0]\n xmin = trans((min([ x.xmin for x in v ]),0))[0]\n except RuntimeError:\n pass\n\n v = self.n2c.values()\n ymin = min([ c.y for c in v ])\n ymax = max([ c.y for c in v ])\n xmin = min(xmin, min([ c.x for c in v ]))\n xmax = max(xmax, max([ c.x for c in v ]))\n xspan = xmax - xmin; xpad = xspan*0.05\n yspan = ymax - ymin; ypad = yspan*0.05\n self.set_xlim(xmin-xpad, xmax+xpad*2)\n self.set_ylim(ymin-ypad, ymax+ypad)\n self.adjust_xspine()\n\n def scroll(self, x, y):\n x0, x1 = self.get_xlim()\n y0, y1 = self.get_ylim()\n xd = 
(x1-x0)*x\n yd = (y1-y0)*y\n self.set_xlim(x0+xd, x1+xd)\n self.set_ylim(y0+yd, y1+yd)\n self.adjust_xspine()\n\n def plot_labelcolor(self, nodemap, state2color=None):\n if state2color is None:\n c = colors.tango()\n states = sorted(set(nodemap.values()))\n state2color = dict(zip(states, c))\n\n for node, txt in self.node2label.items():\n s = nodemap.get(node)\n if s is not None:\n c = state2color[s]\n if c:\n txt.set_color(c)\n self.figure.canvas.draw_idle()\n\n def node_image(self, node, imgfile, maxdim=100, border=0):\n xoff = self.leaf_offset\n n = self.root[node]; c = self.n2c[n]; p = (c.x, c.y)\n img = Image.open(imgfile)\n if max(img.size) > maxdim:\n img.thumbnail((maxdim, maxdim))\n imgbox = OffsetImage(img)\n xycoords = self.node2label.get(node) or \"data\"\n if xycoords != \"data\": p = (1, 0.5)\n abox = AnnotationBbox(imgbox, p,\n xybox=(xoff, 0.0),\n xycoords=xycoords,\n box_alignment=(0.0,0.5),\n pad=0.0,\n boxcoords=(\"offset points\"))\n self.add_artist(abox)\n\n def plot_discrete(self, data, cmap=None, name=None,\n xoff=10, yoff=0, size=15, legend=1):\n root = self.root\n if cmap is None:\n import ivy\n c = colors.tango()\n states = sorted(set(data.values()))\n cmap = dict(zip(states, c))\n n2c = self.n2c\n points = []; c = []\n d = dict([ (n, data.get(n)) for n in root if data.get(n) is not None ])\n for n, v in d.items():\n coord = n2c[n]\n points.append((coord.x, coord.y)); c.append(cmap[v])\n\n boxes = symbols.squares(self, points, c, size, xoff=xoff, yoff=yoff)\n\n if legend:\n handles = []; labels = []\n for v, c in sorted(cmap.items()):\n handles.append(Rectangle((0,0),0.5,1,fc=c))\n labels.append(str(v))\n self.legend(handles, labels, loc=legend)\n\n self.figure.canvas.draw_idle()\n return boxes\n\n def plot_continuous(self, data, mid=None, name=None, cmap=None,\n size=15, colorbar=True):\n area = (size*0.5)*(size*0.5)*numpy.pi\n values = data.values()\n vmin = min(values); vmax = max(values)\n if mid is None:\n mid = (vmin+vmax)*0.5\n 
delta = vmax-vmin*0.5\n else:\n delta = max(abs(vmax-mid), abs(vmin-mid))\n norm = mpl_colors.Normalize(mid-delta, mid+delta)\n ## if cmap is None: cmap = mpl_colormap.binary\n if cmap is None: cmap = mpl_colormap.hot\n n2c = self.n2c\n X = numpy.array(\n [ (n2c[n].x, n2c[n].y, v) for n, v in data.items() if n in n2c ]\n )\n circles = self.scatter(\n X[:,0], X[:,1], s=area, c=X[:,2], cmap=cmap, norm=norm,\n zorder=1000\n )\n if colorbar:\n cbar = self.figure.colorbar(circles, ax=self, shrink=0.7)\n if name:\n cbar.ax.set_xlabel(name)\n\n self.figure.canvas.draw_idle()\n\nclass RadialTree(Tree):\n def layout(self):\n from ..layout_polar import calc_node_positions\n start = self.start if hasattr(self, 'start') else 0\n end = self.end if hasattr(self, 'end') else None\n self.n2c = calc_node_positions(self.root, scaled=self.scaled,\n start=start, end=end)\n sv = sorted([\n [c.y, c.x, n] for n, c in self.n2c.items()\n ])\n self.coords = sv\n\n ## def _path_to_parent(self, node, width=None, color=None):\n ## c = self.n2c[node]; theta1 = c.angle; r = c.depth\n ## M = Path.MOVETO; L = Path.LINETO\n ## pc = self.n2c[node.parent]; theta2 = pc.angle\n ## px1 = math.cos(math.radians(c.angle))*pc.depth\n ## py1 = math.sin(math.radians(c.angle))*pc.depth\n ## verts = [(c.x,c.y),(px1,py1)]; codes = [M,L]\n ## #verts.append((pc.x,pc.y)); codes.append(L)\n ## path = PathPatch(Path(verts, codes), fill=False,\n ## linewidth=width or self.branch_width,\n ## edgecolor=color or self.branch_color)\n ## diam = pc.depth*2\n ## t1, t2 = tuple(sorted((theta1,theta2)))\n ## arc = Arc((0,0), diam, diam, theta1=t1, theta2=t2,\n ## edgecolor=color or self.branch_color,\n ## linewidth=width or self.branch_width)\n ## return [path, arc]\n\n def _path_to_parent(self, node):\n c = self.n2c[node]; theta1 = c.angle; r = c.depth\n M = Path.MOVETO; L = Path.LINETO\n pc = self.n2c[node.parent]; theta2 = pc.angle\n px1 = math.cos(math.radians(c.angle))*pc.depth\n py1 = 
math.sin(math.radians(c.angle))*pc.depth\n verts = [(c.x,c.y),(px1,py1)]; codes = [M,L]\n t1, t2 = tuple(sorted((theta1,theta2)))\n diam = pc.depth*2\n arc = Arc((0,0), diam, diam, theta1=t1, theta2=t2)\n arcpath = arc.get_path()\n av = arcpath.vertices * pc.depth\n ac = arcpath.codes\n verts.extend(av.tolist())\n codes.extend(ac.tolist())\n return verts, codes\n\n def highlight(self, nodes=None, width=5, color=\"red\"):\n if self.highlightpatch:\n try:\n self.highlightpatch.remove()\n except:\n pass\n if not nodes:\n return\n\n if len(nodes)>1:\n mrca = self.root.mrca(nodes)\n if not mrca:\n return\n else:\n mrca = list(nodes)[0]\n\n M = Path.MOVETO; L = Path.LINETO\n verts = []\n codes = []\n seen = set()\n patches = []\n for node, coords in [ x for x in self.n2c.items() if x[0] in nodes ]:\n x = coords.x; y = coords.y\n p = node.parent\n while p:\n pcoords = self.n2c[p]\n px = pcoords.x; py = pcoords.y\n if node not in seen:\n v, c = self._path_to_parent(node)\n verts.extend(v)\n codes.extend(c)\n seen.add(node)\n if p == mrca or node == mrca:\n break\n node = p\n coords = self.n2c[node]\n x = coords.x; y = coords.y\n p = node.parent\n ## px, py = verts[-1]\n ## verts.append((px, py)); codes.append(M)\n self.highlightpath = Path(verts, codes)\n self.highlightpatch = PathPatch(\n self.highlightpath, fill=False, linewidth=width, edgecolor=color\n )\n self.add_patch(self.highlightpatch)\n ## self.highlight_patches = PatchCollection(patches, match_original=True)\n ## self.add_collection(self.highlight_patches)\n\n\nclass OverviewTree(Tree):\n def __init__(self, *args, **kwargs):\n kwargs[\"leaflabels\"] = False\n kwargs[\"branchlabels\"] = False\n Tree.__init__(self, *args, **kwargs)\n self.xaxis.set_visible(False)\n self.spines[\"bottom\"].set_visible(False)\n self.add_overview_rect()\n\n def set_target(self, target):\n self.target = target\n\n def add_overview_rect(self):\n rect = UpdatingRect([0, 0], 0, 0, facecolor='black', edgecolor='red')\n 
rect.set_alpha(0.2)\n rect.target = self.target\n rect.set_bounds(*self.target.viewLim.bounds)\n self.zoomrect = rect\n self.add_patch(rect)\n ## if pyplot.isinteractive():\n self.target.callbacks.connect('xlim_changed', rect)\n self.target.callbacks.connect('ylim_changed', rect)\n\n def redraw(self):\n Tree.redraw(self)\n self.add_overview_rect()\n self.figure.canvas.draw_idle()\n\ndef axes_enter(e):\n ax = e.inaxes\n ax._active = True\n\ndef axes_leave(e):\n ax = e.inaxes\n ax._active = False\n\ndef onselect(estart, estop):\n b = estart.button\n ## print b, estart.key\n\ndef onkeypress(e):\n ax = e.inaxes\n k = e.key\n if ax and k == 't':\n ax.home()\n if ax and k == \"down\":\n ax.scroll(0, -0.1)\n ax.figure.canvas.draw_idle()\n if ax and k == \"up\":\n ax.scroll(0, 0.1)\n ax.figure.canvas.draw_idle()\n if ax and k == \"left\":\n ax.scroll(-0.1, 0)\n ax.figure.canvas.draw_idle()\n if ax and k == \"right\":\n ax.scroll(0.1, 0)\n ax.figure.canvas.draw_idle()\n if ax and k and k in '=+':\n ax.zoom(0.1,0.1)\n if ax and k == '-':\n ax.zoom(-0.1,-0.1)\n\ndef ondrag(e):\n ax = e.inaxes\n button = e.button\n if ax and button == 2:\n if not ax.pan_start:\n ax.pan_start = (e.xdata, e.ydata)\n return\n x, y = ax.pan_start\n xdelta = x - e.xdata\n ydelta = y - e.ydata\n x0, x1 = ax.get_xlim()\n xspan = x1-x0\n y0, y1 = ax.get_ylim()\n yspan = y1 - y0\n midx = (x1+x0)*0.5\n midy = (y1+y0)*0.5\n ax.set_xlim(midx+xdelta-xspan*0.5, midx+xdelta+xspan*0.5)\n ax.set_ylim(midy+ydelta-yspan*0.5, midy+ydelta+yspan*0.5)\n ax.adjust_xspine()\n\ndef onbuttonrelease(e):\n ax = e.inaxes\n button = e.button\n if button == 2:\n ## print \"pan end\"\n ax.pan_start = None\n\ndef onpick(e):\n ax = e.mouseevent.inaxes\n if ax:\n ax.picked(e)\n\ndef onscroll(e):\n ax = e.inaxes\n if ax:\n b = e.button\n ## print b\n k = e.key\n if k == None and b ==\"up\":\n ax.zoom(0.1,0.1)\n if k == None and b ==\"down\":\n ax.zoom(-0.1,-0.1)\n if k == \"shift\" and b == \"up\":\n ax.zoom_cxy(0.1, 0, e.xdata, 
e.ydata)\n if k == \"shift\" and b == \"down\":\n ax.zoom_cxy(-0.1, 0, e.xdata, e.ydata)\n if k == \"control\" and b == \"up\":\n ax.zoom_cxy(0, 0.1, e.xdata, e.ydata)\n if k == \"control\" and b == \"down\":\n ax.zoom_cxy(0, -0.1, e.xdata, e.ydata)\n if k == \"d\" and b == \"up\":\n ax.scroll(0, 0.1)\n if (k == \"d\" and b == \"down\"):\n ax.scroll(0, -0.1)\n if k == \"c\" and b == \"up\":\n ax.scroll(-0.1, 0)\n if k == \"c\" and b == \"down\":\n ax.scroll(0.1, 0)\n try: ax.adjust_xspine()\n except: pass\n ax.figure.canvas.draw_idle()\n\ndef onclick(e):\n ax = e.inaxes\n if ax and e.button==1 and hasattr(ax, \"zoomrect\") and ax.zoomrect:\n # overview clicked; reposition zoomrect\n r = ax.zoomrect\n x = e.xdata\n y = e.ydata\n arr = ax.transData.inverted().transform(r.get_extents())\n xoff = (arr[1][0]-arr[0][0])*0.5\n yoff = (arr[1][1]-arr[0][1])*0.5\n r.target.set_xlim(x-xoff,x+xoff)\n r.target.set_ylim(y-yoff,y+yoff)\n r(r.target)\n ax.figure.canvas.draw_idle()\n\n if ax and e.button==2:\n ## print \"pan start\", (e.xdata, e.ydata)\n ax.pan_start = (e.xdata, e.ydata)\n\n\ndef test_decorate(treeplot):\n import evolve\n data = evolve.brownian(treeplot.root)\n values = data.values()\n vmin = min(values); vmax = max(values)\n norm = mpl_colors.Normalize(vmin, vmax)\n cmap = mpl_colormap.binary\n n2c = treeplot.n2c\n X = numpy.array(\n [ (n2c[n].x, n2c[n].y, v)\n for n, v in data.items() if n in n2c ]\n )\n circles = treeplot.scatter(\n X[:,0], X[:,1], s=200, c=X[:,2], cmap=cmap, norm=norm,\n zorder=100\n )\n\nclass Decorator(object):\n def __init__(self, treeplot):\n self.plot = treeplot\n\nclass VisToggle(object):\n def __init__(self, name, treeplot=None, value=False):\n self.name = name\n self.plot = treeplot\n self.value = value\n\n def __nonzero__(self):\n return self.value\n\n def __repr__(self):\n return \"%s: %s\" % (self.name, self.value)\n\n def redraw(self):\n if self.plot:\n self.plot.redraw()\n\n def toggle(self):\n self.value = not self.value\n 
self.redraw()\n\n def show(self):\n if self.value == False:\n self.value = True\n self.redraw()\n\n def hide(self):\n if self.value == True:\n self.value = False\n self.redraw()\n\n\nTreePlot = subplot_class_factory(Tree)\nRadialTreePlot = subplot_class_factory(RadialTree)\nOverviewTreePlot = subplot_class_factory(OverviewTree)\n\nif __name__ == \"__main__\":\n import evolve\n root, data = evolve.test_brownian()\n plot_continuous(root, data, name=\"Brownian\", mid=0.0)\n"}, "files_after": {"ivy/__init__.py": "\"\"\"\nivy - a phylogenetics library and visual shell\nhttp://www.reelab.net/ivy\n\nCopyright 2010 Richard Ree \n\nRequired: ipython, matplotlib, scipy, numpy\nUseful: dendropy, biopython, etc.\n\"\"\"\n## This program is free software; you can redistribute it and/or\n## modify it under the terms of the GNU General Public License as\n## published by the Free Software Foundation; either version 3 of the\n## License, or (at your option) any later version.\n\n## This program is distributed in the hope that it will be useful, but\n## WITHOUT ANY WARRANTY; without even the implied warranty of\n## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n## General Public License for more details.\n\n## You should have received a copy of the GNU General Public License\n## along with this program. If not, see\n## .\n\nfrom . import tree, layout, contrasts, ages\nfrom . import genbank, nexus, newick, storage\n# from . import bipart\n# import nodearray, data\n# from . import treebase\n# import db\n# import contrib\ntry:\n import ltt as _ltt\n ltt = _ltt.ltt\nexcept ImportError:\n pass\n\nfrom . import align, sequtil\n# from . 
import chars, align, sequtil\n## try: import vis\n## except RuntimeError: pass\n", "ivy/ages.py": "\"\"\"\nCalculate node ages from branch lengths.\n\nThe function of interest is `ages2lengths`\n\"\"\"\nfrom __future__ import print_function\n\ndef ages2lengths(node, node_ages, results={}):\n \"\"\"\n Convert node ages to branch lengths\n\n Args:\n node (Node): Node object\n node_ages (dict): Dict mapping nodes to ages\n Returns:\n dict: mapping of nodes to lengths\n\n \"\"\"\n for d in node.descendants():\n age = node_ages[d]\n if d.parent:\n parent_age = node_ages[d.parent]\n results[d] = parent_age - age\n return results\n\ndef min_ages(node, leaf_ages, results={}):\n \"\"\"\n Calculate minimum ages given fixed ages in leaf_ages\n\n Args:\n node (Node): A node object\n leaf_ages (dict): A dict mapping leaf nodes to ages\n Returns:\n dict: mapping of nodes to ages\n \"\"\"\n v = []\n for child in node.children:\n if child.label and (child.label in leaf_ages):\n age = leaf_ages[child.label]\n v.append(age)\n results[child] = age\n else:\n min_ages(child, leaf_ages, results)\n age = results[child]\n v.append(age)\n results[node] = max(v)\n return results\n\ndef smooth(node, node_ages, results={}):\n \"\"\"\n adjust ages of internal nodes by smoothing\n RR: I don't actually know what this function does -CZ\n \"\"\"\n if node.parent:\n parent_age = node_ages[node.parent]\n if node.children:\n max_child_age = max([ node_ages[child] for child in node.children ])\n # make the new age the average of parent and max child\n new_node_age = (parent_age + max_child_age)/2.0\n results[node] = new_node_age\n else:\n results[node] = node_ages[node]\n else:\n results[node] = node_ages[node]\n for child in node.children:\n smooth(child, node_ages, results)\n return results\n\nif __name__ == \"__main__\":\n import newick, ascii\n\n s = \"((((a,b),(c,d),(e,f)),g),h);\"\n root = newick.parse(s)\n\n leaf_ages = {\n \"a\": 3,\n \"b\": 2,\n \"c\": 4,\n \"d\": 1,\n \"e\": 3,\n \"f\": 
0.5,\n \"g\": 10,\n \"h\": 5,\n }\n\n ma = min_ages(root, leaf_ages)\n d = ma\n for i in range(10):\n d = smooth(root, d)\n for node, val in ages2lengths(root, d).items():\n node.length = val\n print(ascii.render(root, scaled=1))\n", "ivy/align.py": "import os\nfrom subprocess import Popen, PIPE\nfrom Bio import AlignIO\nfrom Bio.Alphabet import IUPAC\ntry:\n from cStringIO import StringIO\nexcept ImportError:\n from io import StringIO\nfrom tempfile import NamedTemporaryFile\n\nMUSCLE = \"/usr/bin/muscle\"\n\ndef muscle(seqs, cmd=None):\n if not cmd: cmd = MUSCLE\n assert os.path.exists(cmd)\n p = Popen([cmd], stdin=PIPE, stdout=PIPE)\n write = p.stdin.write\n for x in seqs:\n write(\">%s\\n%s\\n\" % (x.id, x.seq))\n out = p.communicate()[0]\n aln = AlignIO.read(StringIO(out), 'fasta', alphabet=IUPAC.ambiguous_dna)\n return aln\n\ndef musclep(seqs1, seqs2, cmd=\"/usr/bin/muscle\"):\n assert os.path.exists(cmd)\n f1 = NamedTemporaryFile(); f2 = NamedTemporaryFile()\n for s, f in ((seqs1, f1), (seqs2, f2)):\n write = f.file.write\n for x in s: write(\">%s\\n%s\\n\" % (x.id, x.seq))\n f1.file.flush(); f2.file.flush()\n cmd += \" -profile -in1 %s -in2 %s\" % (f1.name, f2.name)\n p = Popen(cmd.split(), stdout=PIPE)\n out = p.communicate()[0]\n aln = AlignIO.read(StringIO(out), 'fasta', alphabet=IUPAC.ambiguous_dna)\n f1.file.close(); f2.file.close()\n return aln\n \ndef read(data, format=None, name=None):\n from types import StringTypes\n \n def strip(s):\n fname = os.path.split(s)[-1]\n head, tail = os.path.splitext(fname)\n tail = tail.lower()\n if tail in (\".fasta\", \".nex\", \".nexus\"):\n return head\n else:\n return fname\n\n if (not format):\n if (type(data) in StringTypes) and os.path.isfile(data):\n s = data.lower()\n if s.endswith(\"fasta\"):\n format=\"fasta\"\n for tail in \".nex\", \".nexus\":\n if s.endswith(tail):\n format=\"nexus\"\n break\n\n if (not format):\n format = \"fasta\"\n\n if type(data) in StringTypes:\n if os.path.isfile(data):\n name = 
strip(data)\n with open(data) as f:\n return AlignIO.read(f, format, alphabet=IUPAC.ambiguous_dna)\n else:\n f = StringIO(data)\n return AlignIO.read(f, format, alphabet=IUPAC.ambiguous_dna)\n\n elif (hasattr(data, \"tell\") and hasattr(data, \"read\")):\n treename = strip(getattr(data, \"name\", None))\n return AlignIO.read(data, format, alphabet=IUPAC.ambiguous_dna)\n\n raise IOError(\"unable to read alignment from '%s'\" % data)\n\ndef write(data, f, format='fasta'):\n AlignIO.write(data, f, format)\n \ndef find(aln, substr):\n \"\"\"\n generator that yields (seqnum, pos) tuples for every position of\n ``subseq`` in `aln`\n \"\"\"\n from sequtil import finditer\n N = len(substr)\n for i, rec in enumerate(aln):\n for j in finditer(rec.seq, substr):\n yield (i,j)\n \ndef find_id(aln, regexp):\n import re\n return [ (i,s) for i, s in enumerate(aln) if re.search(regexp, s.id) ]\n \ndef gapcols(aln, c='-'):\n from numpy import array\n a = array([ list(x.seq) for x in aln ])\n for i, col in enumerate(a.T):\n s = set(col==c)\n if len(s)==1 and True in s:\n yield i\n", "ivy/ascii.py": "from __future__ import print_function\nimport math\nfrom array import array\nfrom .layout import depth_length_preorder_traversal\n\nclass AsciiBuffer:\n def __init__(self, width, height):\n self.width = int(width)\n self.height = int(height)\n self._b = [ array('u', ' '*self.width) for line in range(self.height) ]\n\n def putstr(self, r, c, s):\n assert r < self.height\n assert c+len(s) <= self.width, \"%s %s %s '%s'\" % (self.width, r, c, s)\n c = int(c)\n self._b[r][c:c+len(s)+1] = array('u', s)\n\n def __str__(self):\n return \"\\n\".join([ ''.join(b) for b in self._b ])\n\ndef sum_to_root(node, internodes=True, length=False):\n \"\"\"\n Number of branches from node to root.\n\n Args:\n node (Node): A Node object\n RR: Do internodes and length do anything in this function? 
-CZ\n Returns:\n int: The number of branches from node to root.\n \"\"\"\n i = 0\n n = node\n while 1:\n if not n.parent:\n break\n else:\n n = n.parent\n i += 1\n return i\n\n## def depth_length_preorder_traversal(node):\n## if not node.parent:\n## node.depth = 0\n## node.length_to_root = 0.0\n## else:\n## p = node.parent\n## node.depth = p.depth + 1\n## node.length_to_root = p.length_to_root + (node.length or 0.0)\n## for ch in node.children:\n## depth_length_preorder_traversal(ch)\n\ndef smooth_cpos(node, n2c):\n for ch in node.children:\n smooth_cpos(ch, n2c)\n\n if node.parent and not node.isleaf:\n px = n2c[node.parent].c\n cx = min([ n2c[ch].c for ch in node.children ])\n dxp = n2c[node].c - px\n cxp = cx - n2c[node].c\n n2c[node].c = px + (cx - px)*0.5\n\ndef scale_cpos(node, n2c, scalef, root_offset):\n if node.parent:\n n2c[node].c = n2c[node.parent].c + (node.length * scalef)\n else:\n n2c[node].c = root_offset\n\n for ch in node.children:\n scale_cpos(ch, n2c, scalef, root_offset)\n\ndef set_rpos(node, n2c):\n for child in node.children:\n set_rpos(child, n2c)\n nc = n2c[node]\n if node.children:\n children = node.children\n c0 = n2c[children[0]]\n c1 = n2c[children[-1]]\n rmin = c0.r; rmax = c1.r\n nc.r = math.ceil(rmin + (rmax-rmin)/2.0)\n\ndef render(root, unitlen=3, minwidth=50, maxwidth=None, scaled=False,\n show_internal_labels=True):\n \"\"\"\n Create the ascii tree to be shown with print()\n \"\"\"\n n2c = depth_length_preorder_traversal(root)\n leaves = root.leaves(); nleaves = len(leaves)\n maxdepth = max([ n2c[lf].depth for lf in leaves ])\n max_labelwidth = max([ len(lf.label) for lf in leaves ]) + 1\n\n root_offset = 0\n if root.label and show_internal_labels:\n root_offset = len(root.label)\n\n width = maxdepth*unitlen + max_labelwidth + 2 + root_offset\n height = 2*nleaves - 1\n\n if width < minwidth:\n unitlen = math.ceil((minwidth - max_labelwidth - 2 - root_offset)/maxdepth)\n width = maxdepth*unitlen + max_labelwidth + 2 + 
root_offset\n\n print(width, height)\n buf = AsciiBuffer(width, height)\n\n for i, lf in enumerate(leaves):\n c = n2c[lf]\n c.c = width - max_labelwidth - 2\n c.r = i*2\n\n for node in root.postiter():\n nc = n2c[node]\n if node.children:\n children = node.children\n c0 = n2c[children[0]]\n c1 = n2c[children[-1]]\n rmin = c0.r; rmax = c1.r\n nc.r = math.ceil(rmin + (rmax-rmin)/2.0)\n nc.c = min([ n2c[ch].c for ch in children ]) - unitlen\n\n\n if not scaled:\n smooth_cpos(root, n2c)\n else:\n maxlen = max([ n2c[lf].length_to_root for lf in leaves ])\n scalef = (n2c[leaves[0]].c + 1 - root_offset)/maxlen\n scale_cpos(root, n2c, scalef, root_offset)\n\n for node in root.postiter():\n nc = n2c[node]\n if node.parent:\n pc = n2c[node.parent]\n for r in range(min([nc.r, pc.r]),\n max([nc.r, pc.r])):\n buf.putstr(r, pc.c, \":\")\n\n sym = getattr(nc, \"hchar\", \"-\")\n vbar = sym*math.floor(nc.c-pc.c)\n buf.putstr(nc.r, math.ceil(pc.c), vbar)\n\n if node.isleaf:\n buf.putstr(nc.r, nc.c+1, \" \"+node.label)\n else:\n if node.label and show_internal_labels:\n buf.putstr(nc.r, nc.c-len(node.label), node.label)\n\n buf.putstr(nc.r, nc.c, \"+\")\n\n return str(buf)\n\nif __name__ == \"__main__\":\n from . import tree\n t = tree.read(\n \"(foo,((bar,(dog,cat)dc)dcb,(shoe,(fly,(cow, bowwow)cowb)cbf)X)Y)Z;\"\n )\n\n #t = tree.read(\"(((foo:4.6):5.6, (bar:6.5, baz:2.3):3.0):3.0);\")\n #t = tree.read(\"(foo:4.6, (bar:6.5, baz:2.3)X:3.0)Y:3.0;\")\n\n i = 1\n print(render(t, scaled=0, show_internal_labels=1))\n r = t.get(\"cat\").parent\n tree.reroot(t, r)\n tp = t.parent\n tp.remove_child(t)\n c = t.children[0]\n t.remove_child(c)\n tp.add_child(c)\n print(render(r, scaled=0, show_internal_labels=1))\n", "ivy/autocollapse.py": "\"\"\"\nFor drawing big trees. 
Calculate which clades can be 'collapsed' and\ndisplayed with a placeholder.\n\nTODO: test and develop this module further\n\"\"\"\nfrom storage import Storage\n\ndef autocollapse_info(node, collapsed, visible=True, info={}):\n \"\"\"\n gather information to determine if a node should be collapsed\n\n *collapsed* is a set containing nodes that are already collapsed\n \"\"\"\n if node not in info:\n s = Storage()\n info[node] = s\n else:\n s = info[node]\n \n if visible and (node in collapsed):\n visible = False\n \n nnodes = 1 # total number of nodes, including node\n # number of visible leaves\n nvisible = int((visible and node.isleaf) or (node in collapsed))\n ntips = int(node.isleaf)\n ntips_visible = int(node.isleaf and visible)\n s.has_labeled_descendant = False\n s.depth = 1\n\n for child in node.children:\n autocollapse_info(child, collapsed, visible, info)\n cs = info[child]\n nnodes += cs.nnodes\n nvisible += cs.nvisible\n ntips += cs.ntips\n ntips_visible += cs.ntips_visible\n if (child.label and (not child.isleaf)) \\\n or (cs.has_labeled_descendant):\n s.has_labeled_descendant = True\n if cs.depth >= s.depth:\n s.depth = cs.depth+1\n s.nnodes = nnodes\n s.nvisible = nvisible\n s.ntips = ntips\n s.ntips_visible = ntips_visible\n return info\n\ndef autocollapse(root, collapsed=None, keep_visible=None, max_visible=1000):\n \"\"\"\n traverse a tree and find nodes that should be collapsed in order\n to satify *max_visible*\n\n *collapsed* is a set object for storing collapsed nodes\n\n *keep_visible* is a set object of nodes that should not be placed\n in *collapsed*\n \"\"\"\n collapsed = collapsed or set()\n keep_visible = keep_visible or set()\n ntries = 0\n while True:\n if ntries > 10:\n return\n info = autocollapse_info(root, collapsed)\n nvisible = info[root].nvisible\n if nvisible <= max_visible:\n return\n \n v = []\n for node in root.iternodes():\n s = info[node]\n if (node.label and (not node.isleaf) and node.parent and\n (node not in 
keep_visible)):\n w = s.nvisible/float(s.depth)\n if s.has_labeled_descendant:\n w *= 0.25\n v.append((w, node, s))\n v.sort(); v.reverse()\n for w, node, s in v:\n if node not in keep_visible and s.nvisible < (nvisible-1):\n print(node)\n collapsed.add(node)\n nvisible -= s.nvisible\n if nvisible <= max_visible:\n break\n ntries += 1\n return collapsed\n", "ivy/bipart.py": "from collections import defaultdict\n\n## class BipartSet(object):\n## \"A set of bipartitions\"\n## def __init__(self, elements):\n## self.elements = frozenset(elements)\n## self.ref = sorted(elements)[0]\n## self.node2bipart = Storage()\n\n## def add(self, subset, node):\n## # filter out elements of subset not in 'elements'\n## subset = (frozenset(subset) & self.elements)\n## if self.ref not in self.subset:\n## self.subset = self.elements - self.subset\n\nclass Bipart(object):\n \"\"\"\n A class representing a bipartition.\n \"\"\"\n def __init__(self, elements, subset, node=None, support=None):\n \"\"\"\n 'elements' and 'subset' are set objects\n \"\"\"\n self.subset = subset\n self.compute(elements)\n self.node = node\n self.support = support\n\n def __hash__(self):\n return self._hash\n\n def __eq__(self, other):\n assert self.elements == other.elements\n return ((self.subset == other.subset) or\n (self.subset == (self.elements - other.subset)))\n\n def __repr__(self):\n v = sorted(self.subset)\n return \"(%s)\" % \" \".join(map(str, v))\n\n def compute(self, elements):\n self.elements = frozenset(elements)\n self.ref = sorted(elements)[0]\n # filter out elements of subset not in 'elements'\n self.subset = (frozenset(self.subset) & self.elements)\n self._hash = hash(self.subset)\n if self.ref not in self.subset:\n self.subset = self.elements - self.subset\n self.complement = self.elements - self.subset\n\n def iscompatible(self, other):\n ## assert self.elements == other.elements\n if (self.subset.issubset(other.subset) or\n other.subset.issubset(self.subset)):\n return True\n if 
(((self.subset | other.subset) == self.elements) or\n (not (self.subset & other.subset))):\n return True\n return False\n\ndef conflict(bp1, bp2, support=None):\n if ((support and (bp1.support >= support) and (bp2.support >= support))\n or (not support)):\n if not bp1.iscompatible(bp2):\n return True\n return False\n\nclass TreeSet:\n def __init__(self, root, elements=None):\n self.root = root\n self.node2labels = root.leafsets(labels=True)\n self.elements = elements or self.node2labels.pop(root)\n self.biparts = [ Bipart(self.elements, v, node=k,\n support=int(k.label or 0))\n for k, v in self.node2labels.items() ]\n\ndef compare_trees(r1, r2, support=None):\n e = (set([ x.label for x in r1.leaves() ]) &\n set([ x.label for x in r2.leaves() ]))\n bp1 = [ Bipart(e, v, node=k, support=int(k.label or 0))\n for k, v in r1.leafsets(labels=True).items() ]\n bp2 = [ Bipart(e, v, node=k, support=int(k.label or 0))\n for k, v in r2.leafsets(labels=True).items() ]\n return compare(bp1, bp2, support)\n\ndef compare(set1, set2, support=None):\n hits1 = []; hits2 = []\n conflicts1 = defaultdict(set); conflicts2 = defaultdict(set)\n for bp1 in set1:\n for bp2 in set2:\n if bp1 == bp2:\n hits1.append(bp1.node); hits2.append(bp2.node)\n if conflict(bp1, bp2, support):\n conflicts1[bp1.node].add(bp2.node)\n conflicts2[bp2.node].add(bp1.node)\n return hits1, hits2, conflicts1, conflicts2\n \n## a = Bipart(\"abcdef\", \"abc\")\n## b = Bipart(\"abcdef\", \"def\")\n## c = Bipart(\"abcdef\", \"ab\")\n## d = Bipart(\"abcdef\", \"cd\")\n## print a == b\n## print a.iscompatible(b)\n## print a.iscompatible(c)\n## print a.iscompatible(d)\n## print c.iscompatible(d)\n## sys.exit() \n", "ivy/chars/__init__.py": "from . 
import mk, catpars, evolve\n", "ivy/chars/catpars.py": "from __future__ import print_function\nimport numpy as np\n\ndef default_costmatrix(numstates, dtype=np.int):\n \"a square array with zeroes along the diagonal, ones elsewhere\"\n return np.logical_not(np.identity(numstates)).astype(float)\n\ndef minstates(v):\n \"return the indices of v that equal the minimum\"\n return np.nonzero(np.equal(v, min(v)))\n\ndef downpass(node, states, stepmatrix, chardata, node2dpv=None):\n if node2dpv is None:\n node2dpv = {}\n \n if not node.isleaf:\n for child in node.children:\n downpass(child, states, stepmatrix, chardata, node2dpv)\n\n dpv = np.zeros([len(states)])\n node2dpv[node] = dpv\n for i in states:\n for child in node.children:\n child_dpv = node2dpv[child]\n mincost = min([ child_dpv[j] + stepmatrix[i,j] \\\n for j in states ])\n dpv[i] += mincost\n \n #print node.label, node.dpv\n\n else:\n #print node.label, chardata[node.label]\n node2dpv[node] = stepmatrix[:,chardata[node.label]]\n\n return node2dpv\n \n\ndef uppass(node, states, stepmatrix, node2dpv, node2upm={},\n node2ancstates=None):\n parent = node.parent\n if not node.isleaf:\n if parent is None: # root\n dpv = node2dpv[node]\n upm = None\n node.mincost = min(dpv)\n node2ancstates = {node: minstates(dpv)}\n \n else:\n M = np.zeros(stepmatrix.shape)\n for i in states:\n sibs = [ c for c in parent.children if c is not node ]\n for j in states:\n c = 0\n for sib in sibs:\n sibdpv = node2dpv[sib]\n c += min([ sibdpv[x] + stepmatrix[j,x]\n for x in states ])\n c += stepmatrix[j,i]\n\n p_upm = node2upm.get(parent)\n if p_upm is not None:\n c += min(p_upm[j])\n\n M[i,j] += c\n \n node2upm[node] = M\n\n v = node2dpv[node][:]\n for s in states:\n v[s] += min(M[s])\n node2ancstates[node] = minstates(v)\n\n for child in node.children:\n uppass(child, states, stepmatrix, node2dpv, node2upm,\n node2ancstates)\n\n return node2ancstates\n \ndef ancstates(tree, chardata, stepmatrix):\n states = range(len(stepmatrix))\n 
return uppass(tree, states, stepmatrix,\n downpass(tree, states, stepmatrix, chardata))\n\ndef _bindeltran(node, stepmatrix, node2dpv, node2deltr=None, ancstate=None):\n if node2deltr is None:\n node2deltr = {}\n\n dpv = node2dpv[node]\n if ancstate is not None:\n c, s = min([ (cost+stepmatrix[ancstate,i], i) \\\n for i, cost in enumerate(dpv) ])\n else:\n c, s = min([ (cost, i) for i, cost in enumerate(dpv) ])\n \n node2deltr[node] = s\n for child in node.children:\n _bindeltran(child, stepmatrix, node2dpv, node2deltr, s)\n\n return node2deltr\n \ndef binary_deltran(tree, chardata, stepmatrix):\n states = range(len(stepmatrix))\n node2dpv = downpass(tree, states, stepmatrix, chardata)\n node2deltr = _bindeltran(tree, stepmatrix, node2dpv)\n return node2deltr\n \n\nif __name__ == \"__main__\":\n from pprint import pprint\n from ivy import tree\n root = tree.read(\"(a,((b,c),(d,(e,f))));\")\n\n nstates = 4\n states = range(nstates)\n cm = default_costmatrix(nstates)\n chardata = dict(zip(\"abcdef\", map(int, \"000233\")))\n dp = downpass(root, states, cm, chardata)\n\n for i, node in enumerate(root):\n if not node.label:\n node.label = \"N%s\" % i\n else:\n node.label = \"%s (%s)\" % (node.label, chardata[node.label])\n\n print(ascii.render(root))\n \n\n## nstates = 2\n## leaves = tree.leaves() \n## for leaf in leaves:\n## leaf.anc_cost_vector = chardata[leaf.label]\n\n pprint(\n #ancstates(root, chardata, cm)\n #uppass(root, states, cm, downpass(tree, states, cm, chardata))\n dp\n )\n\n\n", "ivy/chars/evolve.py": "#!/usr/bin/env python\n\"\"\"\nFunctions for evolving traits and trees.\n\"\"\"\nfrom __future__ import print_function\n\ndef brownian(root, sigma=1.0, init=0.0, values={}):\n \"\"\"\n Recursively evolve a trait by Brownian motion up from the node\n *root*.\n\n * *sigma*: standard deviation of the normal random variate after\n one unit of branch length\n\n * *init*: initial value\n\n Returns: *values* - a dictionary mapping nodes to evolved values\n 
\"\"\"\n from scipy.stats import norm\n values[root] = init\n for child in root.children:\n time = child.length\n random_step = norm.rvs(init, scale=sigma*time)\n brownian(child, sigma, random_step, values)\n return values\n\ndef test_brownian():\n \"\"\"\n Evolve a trait up an example tree of primates:.\n\n ((((Homo:0.21,Pongo:0.21)N1:0.28,Macaca:0.49)N2:0.13,\n Ateles:0.62)N3:0.38,Galago:1.00)root;\n\n Returns: (*root*, *data*) - the root node and evolved data.\n \"\"\"\n import newick\n root = newick.parse(\n \"((((Homo:0.21,Pongo:0.21)N1:0.28,Macaca:0.49)N2:0.13,\"\\\n \"Ateles:0.62)N3:0.38,Galago:1.00)root;\"\n )\n print(root.ascii(scaled=True))\n evolved = brownian(root)\n for node in root.iternodes():\n print(node.label, evolved[node])\n return root, evolved\n\nif __name__ == \"__main__\":\n test_brownian()\n", "ivy/chars/mk.py": "\"\"\"\nCategorical Markov models with k states.\n\"\"\"\nfrom __future__ import print_function\nimport numpy, scipy, random\nimport scipy.linalg\nimport scipy.optimize\nfrom math import log, exp\nrand = random.Random()\nuniform = rand.uniform; expovariate = rand.expovariate\n\nLARGE = 10e10 # large -lnL value used to bound parameter optimization\n\nclass Q:\n def __init__(self, k=2, layout=None):\n \"\"\"\n Represents a square transition matrix with k states.\n \n 'layout' is a square (k,k) array of integers that index free\n rate parameters (values on the diagonal are ignored). 
Cells\n with value 0 will have the first rate parameter, 1 the\n second, etc.\n \"\"\"\n self.k = k\n self.range = range(k)\n self.offdiag = array(numpy.eye(k)==0, dtype=numpy.int)\n if layout is None:\n layout = zeros((k,k), numpy.int)\n self.layout = layout*self.offdiag\n\n def fill(self, rates):\n m = numpy.take(rates, self.layout)*self.offdiag\n v = m.sum(1) * -1\n for i in self.range:\n m[i,i] = v[i]\n return m\n\n def default_priors(self):\n p = 1.0/self.k\n return [p]*self.k\n\ndef sample_weighted(weights):\n u = uniform(0, sum(weights))\n x = 0.0\n for i, w in enumerate(weights):\n x += w\n if u < x:\n break\n return i\n\ndef conditionals(root, data, Q):\n nstates = Q.shape[0]\n states = range(nstates)\n nodes = [ x for x in root.postiter() ]\n nnodes = len(nodes)\n v = zeros((nnodes,nstates))\n n2i = {}\n \n for i, n in enumerate(nodes):\n n2i[n] = i\n if n.isleaf:\n state = data[n.label]\n try:\n state = int(state)\n v[i,state] = 1.0\n except ValueError:\n if state == '?' or state == '-':\n v[i,:] += 1/float(nstates)\n else:\n Pv = [ (expm(Q*child.length)*v[n2i[child]]).sum(1)\n for child in n.children ]\n v[i] = numpy.multiply(*Pv)\n # fossils\n state = None\n if n.label in data:\n state = int(data[n.label])\n elif n in data:\n state = int(data[n])\n if state != None:\n for s in states:\n if s != state: v[i,s] = 0.0\n \n return dict([ (n, v[i]) for n,i in n2i.items() ])\n\ndef contrasts(root, data, Q):\n cond = conditionals(root, data, Q)\n d = {}\n for n in root.postiter(lambda x:x.children):\n nc = cond[n]; nc /= sum(nc)\n diff = 0.0\n for child in n.children:\n cc = cond[child]; cc /= sum(cc)\n diff += numpy.sum(numpy.abs(nc-cc))\n d[n] = diff\n return d\n\ndef lnL(root, data, Q, priors):\n d = conditionals(root, data, Q)\n return numpy.log(sum(d[root]*priors))\n\ndef optimize(root, data, Q, priors=None):\n Qfill = Q.fill\n if priors is None: priors = Q.default_priors()\n def f(params):\n if (params<0).any(): return LARGE\n return -lnL(root, data, 
Qfill(params), priors)\n \n # initial parameter values\n p = [1.0]*len(set(Q.layout.flat))\n\n v = scipy.optimize.fmin_powell(\n f, p, full_output=True, disp=0, callback=None\n )\n params, neglnL = v[:2]\n if neglnL == LARGE:\n raise Exception(\"ConvergenceError\")\n return params, neglnL\n\ndef sim(root, n2p, s0, d=None):\n if d is None:\n d = {root:s0}\n for n in root.children:\n v = n2p[n][s0]\n i = sample_weighted(v)\n d[n] = i\n sim(n, n2p, i, d)\n return d\n\ndef stmap(root, states, ancstates, Q, condition_on_success):\n \"\"\"\n This and its dependent functions below need testing and\n optimization.\n \"\"\"\n results = []\n for n in root.descendants():\n si = ancstates[n.parent]\n sj = ancstates[n]\n v = simulate_on_branch(states, si, sj, Q, n.length,\n condition_on_success)\n print(n, si, sj)\n if v:\n results.append(v)\n else:\n return None\n return results\n\ndef simulate_on_branch(states, si, sj, Q, brlen, condition_on_success):\n point = 0.0\n history = [(si, point)]\n if si != sj: # condition on one change occurring\n lambd = -(Q[si,si])\n U = uniform(0.0, 1.0)\n # see appendix of Nielsen 2001, Genetics\n t = brlen - point\n newpoint = -(1.0/lambd) * log(1.0 - U*(1.0 - exp(-lambd * t)))\n newstate = draw_new_state(states, Q, si)\n history.append((newstate, newpoint))\n si = newstate; point = newpoint\n while 1:\n lambd = -(Q[si,si])\n rv = expovariate(lambd)\n newpoint = point + rv\n\n if newpoint <= brlen: # state change along branch\n newstate = draw_new_state(states, Q, si)\n history.append((newstate, newpoint))\n si = newstate; point = newpoint\n else:\n history.append((si, brlen))\n break\n \n if si == sj or (not condition_on_success): # success\n return history\n\n return None\n \ndef draw_new_state(states, Q, si):\n \"\"\"\n Given a rate matrix Q, a starting state si, and an ordered\n sequence of states, eg (0, 1), draw a new state sj with\n probability -(qij/qii)\n \"\"\"\n Qrow = Q[si]\n qii = Qrow[si]\n qij_probs = [ (x, -(Qrow[x]/qii)) for 
x in states if x != si ]\n uni = uniform(0.0, 1.0)\n val = 0.0\n for sj, prob in qij_probs:\n val += prob\n if uni < val:\n return sj\n \ndef sample_ancstates(node, states, conditionals, n2p, fixed={}):\n \"\"\"\n Sample ancestral states from their conditional likelihoods\n \"\"\"\n ancstates = {}\n for n in node.preiter():\n if n in fixed:\n state = fixed[n]\n else:\n cond = conditionals[n]\n\n if n.parent:\n P = n2p[n]\n ancst = ancstates[n.parent]\n newstate_Prow = P[ancst]\n cond *= newstate_Prow\n\n cond /= sum(cond)\n\n rv = uniform(0.0, 1.0)\n v = 0.0\n for state, c in zip(states, cond):\n v += c\n if rv < v:\n break\n ancstates[n] = state\n\n return ancstates\n", "ivy/contrasts.py": "\"\"\"\nCalculate independent contrasts\n\nTODO: include utilities for transforming data, etc.\n\"\"\"\nfrom __future__ import print_function\n\ndef PIC(node, data, results={}):\n \"\"\"\n Phylogenetic independent contrasts.\n\n Recursively calculate independent contrasts of a bifurcating node\n given a dictionary of trait values.\n\n Args:\n node (Node): A node object\n data (dict): Mapping of leaf names to character values\n\n Returns:\n dict: Mapping of internal nodes to tuples containing ancestral\n state, its variance (error), the contrast, and the\n contrasts's variance.\n\n TODO: modify to accommodate polytomies.\n \"\"\"\n X = []; v = []\n for child in node.children:\n if child.children:\n PIC(child, data, results)\n child_results = results[child]\n X.append(child_results[0])\n v.append(child_results[1])\n else:\n X.append(data[child.label])\n v.append(child.length)\n\n Xi, Xj = X # Xi - Xj is the contrast value\n vi, vj = v\n\n # Xk is the reconstructed state at the node\n Xk = ((1.0/vi)*Xi + (1/vj)*Xj) / (1.0/vi + 1.0/vj)\n\n # vk is the variance\n vk = node.length + (vi*vj)/(vi+vj)\n\n results[node] = (Xk, vk, Xi-Xj, vi+vj)\n\n return results\n\nif __name__ == \"__main__\":\n import tree\n n = tree.read(\n \"((((Homo:0.21,Pongo:0.21)N1:0.28,Macaca:0.49)N2:0.13,\"\\\n 
\"Ateles:0.62)N3:0.38,Galago:1.00)N4:0.0;\"\n )\n char1 = {\n \"Homo\": 4.09434,\n \"Pongo\": 3.61092,\n \"Macaca\": 2.37024,\n \"Ateles\": 2.02815,\n \"Galago\": -1.46968\n }\n\n for k, v in PIC(n, char1).items():\n print(k.label or k.id, v)\n", "ivy/genbank.py": "import re, sys, logging\nfrom collections import defaultdict\ntry:\n from itertools import izip_longest, ifilter\nexcept:\n from itertools import zip_longest as izip_longest\nfrom Bio import Entrez, SeqIO\nfrom Bio.Blast import NCBIWWW, NCBIXML\nfrom . import storage\n\nemail = \"\"\n\ndef batch(iterable, size):\n \"\"\"\n Take an iterable and return it in chunks (sub-iterables)\n\n Args:\n iterable: Any iterable\n size (int): Size of chunks\n Yields:\n Chunks of size `size`\n \"\"\"\n args = [ iter(iterable) ]*size\n for x in izip_longest(fillvalue=None, *args):\n yield ifilter(None, x)\n\ndef extract_gbac(s):\n \"\"\"\n Extract genbank accession\n\n Args:\n s (str): text string of genbank file\n Returns:\n list: Accession number(s)\n \"\"\"\n gbac_re = re.compile(r'[A-Z]{1,2}[0-9]{4,7}')\n return gbac_re.findall(s, re.M)\n # RR: This also returns various other strings that match the pattern (eg.\n # protein ids)\n\ndef extract_gene(seq, gene):\n \"\"\"\n RR: Not sure what format seq should be in -CZ\n \"\"\"\n for t in \"exon\", \"gene\":\n for x in seq.features:\n if x.type == t:\n v = x.qualifiers.get(\"gene\")\n if v == [gene]:\n if x.sub_features:\n s = [ seq[sf.location.start.position:\n sf.location.end.position]\n for sf in x.sub_features ]\n return reduce(lambda x,y:x+y, s)\n else:\n loc = x.location\n return seq[loc.start.position-10:loc.end.position+10]\n\ndef gi2webenv(gilist):\n h = Entrez.esearch(\n db=\"nucleotide\", term=\" OR \".join(gilist), usehistory=\"y\",\n retmax=len(gilist)\n )\n d = Entrez.read(h)\n return d[\"WebEnv\"], d[\"QueryKey\"]\n\ndef gi2tax(gi):\n global email\n assert email, \"set email!\"\n Entrez.email = email\n h = Entrez.elink(dbfrom='taxonomy', db='nucleotide', 
from_uid=gi,\n LinkName='nucleotide_taxonomy')\n r = Entrez.read(h)[0]\n h.close()\n i = r['LinkSetDb'][0]['Link'][0]['Id']\n h = Entrez.efetch(db='taxonomy', id=i, retmode='xml')\n r = Entrez.read(h)[0]\n h.close()\n return r\n\ndef ac2gi(ac):\n global email\n assert email, \"set email!\"\n Entrez.email = email\n h = Entrez.esearch(db=\"nucleotide\", term=ac, retmax=1)\n d = Entrez.read(h)['IdList'][0]\n h.close()\n return d\n\ndef acsum(aclist, batchsize=100):\n \"\"\"\n fetch esummary info for list of accession numbers -- useful for\n getting gi and taxids\n \"\"\"\n global email\n assert email, \"set email!\"\n Entrez.email = email\n results = {}\n for v in batch(aclist, batchsize):\n v = list(v)\n h = Entrez.esearch(\n db=\"nucleotide\", retmax=len(v),\n term=\" OR \".join([ \"%s[ACCN]\" % x for x in v ]),\n usehistory=\"y\"\n )\n d = Entrez.read(h)\n h.close()\n # gis, but not in order of aclist\n gis = d['IdList']\n d = Entrez.read(Entrez.esummary(db='nucleotide', id=','.join(gis)),\n validate=False)\n for x in d:\n ac = x['Caption']\n if ac in aclist:\n results[ac] = x\n return results\n\ndef fetch_aclist(aclist, batchsize=1000):\n global email\n assert email, \"set email!\"\n Entrez.email = email\n results = {}\n n = 0\n for v in batch(aclist, batchsize):\n v = list(v)\n h = Entrez.esearch(\n db=\"nucleotide\",\n term=\" OR \".join([ \"%s[ACCN]\" % x for x in v ]),\n usehistory=\"y\"\n )\n d = Entrez.read(h)\n h.close()\n h = Entrez.efetch(db=\"nucleotide\", rettype=\"gb\", retmax=len(v),\n webenv=d[\"WebEnv\"], query_key=d[\"QueryKey\"])\n seqs = SeqIO.parse(h, \"genbank\")\n for s in seqs:\n try:\n ac = s.annotations[\"accessions\"][0]\n if ac in aclist:\n results[ac] = s\n except:\n pass\n h.close()\n n += len(v)\n logging.info('fetched %s sequences', n)\n return results\n\ndef fetch_gilist(gilist, batchsize=1000):\n global email\n assert email, \"set email!\"\n Entrez.email = email\n results = {}\n for v in batch(gilist, batchsize):\n v = map(str, v)\n 
h = Entrez.epost(db=\"nucleotide\", id=\",\".join(v), usehistory=\"y\")\n d = Entrez.read(h)\n h.close()\n h = Entrez.efetch(db=\"nucleotide\", rettype=\"gb\", retmax=len(v),\n webenv=d[\"WebEnv\"], query_key=d[\"QueryKey\"])\n seqs = SeqIO.parse(h, \"genbank\")\n for s in seqs:\n try:\n gi = s.annotations[\"gi\"]\n if gi in v:\n s.id = organism_id(s)\n results[gi] = s\n except:\n pass\n h.close()\n return results\n\ndef organism_id(s):\n org = (s.annotations.get('organism') or '').replace('.', '')\n return '%s_%s' % (org.replace(' ','_'), s.id.split('.')[0])\n\ndef fetchseq(gi):\n global email\n assert email, \"set email!\"\n Entrez.email = email\n h = Entrez.efetch(db=\"nucleotide\", id=str(gi), rettype=\"gb\")\n s = SeqIO.read(h, 'genbank')\n s.id = organism_id(s)\n return s\n\ndef create_fastas(data, genes):\n fastas = dict([ (g, file(g+\".fasta\", \"w\")) for g in genes ])\n for label, seqs in data.items():\n for gene, s in zip(genes, seqs):\n if s and type(s) != str:\n tag = None\n try:\n tag = \"%s_%s\" % (label, s.annotations[\"accessions\"][0])\n except:\n tag = \"%s_%s\" % (label, s.name)\n if tag:\n fastas[gene].write(\">%s\\n%s\\n\" % (tag, s.seq))\n else:\n sys.stderr.write((\"error: not an accession number? 
\"\n \"%s (%s %s)\\n\" % (s, label, gene)))\n\n for f in fastas.values(): f.close()\n\ndef merge_fastas(fnames, name=\"merged\"):\n outfile = file(name+\".phy\", \"w\")\n gene2len = {}\n d = defaultdict(dict)\n for fn in fnames:\n gene = fn.split(\".\")[0]\n for rec in SeqIO.parse(file(fn), \"fasta\"):\n #sp = \"_\".join(rec.id.split(\"_\")[:2])\n if rec.id.startswith(\"Pedicularis\"):\n sp = rec.id.split(\"_\")[1]\n else:\n sp = rec.id.split(\"_\")[0]\n sp = \"_\".join(rec.id.split(\"_\")[:-1])\n seq = str(rec.seq)\n d[sp][gene] = seq\n if gene not in gene2len:\n gene2len[gene] = len(seq)\n\n ntax = len(d)\n nchar = sum(gene2len.values())\n outfile.write(\"%s %s\\n\" % (ntax, nchar))\n genes = list(sorted(gene2len.keys()))\n for sp, data in sorted(d.items()):\n s = \"\".join([ (data.get(gene) or \"\".join([\"?\"]*gene2len[gene]))\n for gene in genes ])\n outfile.write(\"%s %s\\n\" % (sp, s))\n outfile.close()\n parts = file(name+\".partitions\", \"w\")\n i = 1\n for g in genes:\n n = gene2len[g]\n parts.write(\"DNA, %s = %s-%s\\n\" % (g, i, i+n-1))\n i += n\n parts.close()\n\ndef blast_closest(fasta, e=10):\n f = NCBIWWW.qblast(\"blastn\", \"nr\", fasta, expect=e, hitlist_size=1)\n rec = NCBIXML.read(f)\n d = rec.descriptions[0]\n result = storage.Storage()\n gi = re.findall(r'gi[|]([0-9]+)', d.title) or None\n if gi: result.gi = int(gi[0])\n ac = re.findall(r'gb[|]([^|]+)', d.title) or None\n if ac: result.ac = ac[0].split(\".\")[0]\n result.title = d.title.split(\"|\")[-1].strip()\n return result\n\ndef blast(query, e=10, n=100, entrez_query=\"\"):\n f = NCBIWWW.qblast(\"blastn\", \"nr\", query, expect=e, hitlist_size=n,\n entrez_query=entrez_query)\n recs = NCBIXML.parse(f)\n return recs\n ## v = []\n ## for d in rec.descriptions:\n ## result = Storage()\n ## gi = re.findall(r'gi[|]([0-9]+)', d.title) or None\n ## if gi: result.gi = int(gi[0])\n ## ac = re.findall(r'gb[|]([^|]+)', d.title) or None\n ## if ac: result.ac = ac[0].split(\".\")[0]\n ## result.title 
= d.title.split(\"|\")[-1].strip()\n ## v.append(result)\n ## return v\n\ndef start_codons(seq):\n i = seq.find('ATG')\n while i != -1:\n yield i\n i = seq.find('ATG', i+3)\n\ndef search_taxonomy(q):\n global email\n assert email, \"set email!\"\n Entrez.email = email\n h = Entrez.esearch(db=\"taxonomy\", term=q)\n return Entrez.read(h)['IdList']\n\ndef fetchtax(taxid):\n global email\n assert email, \"set email!\"\n Entrez.email = email\n n = 1\n if not isinstance(taxid, int):\n # string, possibly with multiple values?\n try:\n taxid = taxid.strip()\n n = taxid.count(',') + 1\n except AttributeError:\n # iterable of values?\n try:\n n = len(taxid)\n taxid = ','.join(map(str, taxid))\n except TypeError:\n pass\n else:\n taxid = str(taxid)\n h = Entrez.efetch(db='taxonomy', id=taxid, retmode='xml', retmax=n)\n if n == 1:\n r = Entrez.read(h)[0]\n else:\n # a list of taxonomy results in same order of taxids\n r = Entrez.read(h)\n return r\n\n__FIRST = re.compile('[^-]')\n__LAST = re.compile('[-]*$')\ndef trimpos(rec):\n 'return the positions of the first and last ungapped base'\n s = rec.seq.tostring()\n first = __FIRST.search(s).start()\n last = __LAST.search(s).start()-1\n return (first, last)\n\ndef fetch_DNA_seqs(terms, maxn=10000, batchsize=1000):\n \"\"\"\n terms: sequence of search terms, quoted appropriately, with Entrez\n specifiers, e.g. 
['\"Mus musculus\"[organism]']\n maxn: maximum number of sequences to return\n returns list of SeqRecord objects\n \"\"\"\n global email\n assert email, \"set email!\"\n Entrez.email = email\n h = Entrez.esearch(db=\"nucleotide\", term=\" OR \".join(terms), usehistory=\"y\")\n d = Entrez.read(h)\n env = d['WebEnv']; key = d['QueryKey']\n N = int(d['Count'])\n if maxn: N = min(N, maxn)\n logging.info('fetching %s sequences', N)\n retstart = 0\n seqs = []\n n = 0\n while n < N:\n h = Entrez.efetch(\n db=\"nucleotide\", rettype='gb', webenv=env, query_key=key,\n retstart=retstart, retmax=batchsize\n )\n v = list(SeqIO.parse(h, \"genbank\"))\n n += len(v)\n logging.info('...fetched %s', n)\n seqs.extend(v)\n retstart += batchsize\n logging.info('...done')\n return seqs\n\ndef seqrec_taxid(seqrec):\n \"extract the NCBI taxon id from a sequence record\"\n for ft in seqrec.features:\n if ft.type == 'source':\n break\n try:\n for x in ft.qualifiers['db_xref']:\n if x.startswith('taxon:'):\n return int(x.split(':')[1])\n except:\n pass\n", "ivy/interactive.py": "#!/usr/bin/env ipython\n# -*- coding: utf-8 -*-\n\"\"\"\nAdds to the interactive IPython/pylab environment\n\"\"\"\nimport sys, os, re\nfrom . import tree, align\nfrom . 
import vis\n\ndef readtree(data, *args, **kwargs):\n return tree.read(data, *args, **kwargs)\n\ndef readaln(data, *args, **kwargs):\n return align.read(data, *args, **kwargs)\n\ndef treefig(*args, **kwargs):\n if len(args) == 1:\n fig = vis.TreeFigure(args[0], **kwargs)\n else:\n fig = vis.MultiTreeFigure(**kwargs)\n for arg in args:\n # print arg\n fig.add(arg)\n fig.show()\n return fig\n\ndef alnfig(*args, **kwargs):\n return vis.AlignmentFigure(*args, **kwargs)\n\ndef __maketree(self, s):\n words = s.split()\n treename = \"root\"\n fname = None\n if words:\n treename = words.pop(0)\n if words and os.path.isfile(words[0]):\n fname = words.pop(0)\n\n if not fname:\n ## msg = \"\\n\".join([\n ## \"Name of tree file\",\n ## \"(Try dragging one into the terminal):\\n\"\n ## ])\n msg = \"Enter the name of a tree file or a newick string:\\n\"\n fname = raw_input(msg).strip()\n\n quotes = [\"'\", '\"']\n if fname and fname[0] in quotes:\n fname = fname[1:]\n if fname and fname[-1] in quotes:\n fname = fname[:-1]\n if fname:\n try:\n ## root = ivy.tree.read(fname)\n ## IPython.ipapi.get().to_user_ns({treename:root})\n cmd = \"%s = ivy.tree.read('%s')\" % (treename, fname)\n get_ipython().ex(cmd)\n print(\"Tree parsed and assigned to variable '%s'\" % treename)\n except:\n print(\"Unable to parse tree file '%s'\" % fname)\n else:\n print(\"Cancelled\")\n\ndef __node_completer(self, event):\n symbol = event.symbol\n s = event.line\n if symbol:\n s = s[:-len(symbol)]\n quote = \"\"\n if s and s[-1] in [\"'\", '\"']:\n quote = s[-1]\n s = s[:-1]\n #base = (re.findall(r'(\\w+)\\[\\Z', s) or [None])[-1]\n base = \"\".join((re.findall(r'(\\w+\\.\\w*)?(\\.)?(\\w+)\\[\\Z', s) or [\"\"])[-1])\n ## print \"symbol:\", symbol\n ## print \"line:\", event.line\n ## print \"s:\", s\n ## print \"quote:\", quote\n ## print \"base:\", base\n ## print \"obj:\", self._ofind(base).get(\"obj\")\n\n obj = None\n if base:\n obj = self._ofind(base).get(\"obj\")\n ## print '\\n'\n ## print 'base', 
base\n ## print 'obj', obj\n if obj and isinstance(obj, tree.Node):\n completions = [\"'\"]\n if quote:\n completions = sorted([ x.label for x in obj.labeled() ])\n return completions\n\n raise IPython.core.error.TryNext()\n\ntry:\n ## import IPython\n IP = get_ipython() #IPython.ipapi.get()\n IP.magic('matplotlib')\n if IP:\n #IP.expose_magic(\"maketree\", __maketree)\n # IP.define_magic(\"maketree\", __maketree)\n ## IP.set_hook(\n ## \"complete_command\", __node_completer, re_key=r'\\[*'\n ## )\n IP.set_hook(\n \"complete_command\", __node_completer,\n re_key='.+[[]([\\']|[\"])*\\w*$'\n )\n\nexcept:\n print(sys.exc_info()[0])\n sys.stderr.write(\"Magic commands and completers requires IPython >= 0.11\\n\")\n\n## if __name__ == \"__main__\":\n## if len(sys.argv) > 1:\n## for fname in sys.argv[1:]:\n## if os.path.isfile(fname):\n## execfile(fname)\n", "ivy/layout.py": "\"\"\"\nlayout nodes in 2d space\n\nThe function of interest is `calc_node_positions` (aka nodepos)\n\"\"\"\nfrom __future__ import print_function\nimport numpy\n\nclass Coordinates:\n \"\"\"\n Coordinates class for storing xy coordinates\n \"\"\"\n def __init__(self, x=0, y=0):\n self.x = x\n self.y = y\n\n def __repr__(self):\n return \"Coordinates(%g, %g)\" % (self.x, self.y)\n\n def point(self):\n return (self.x, self.y)\n\ndef smooth_xpos(node, n2coords):\n \"\"\"\n RR: What does smoothing do? -CZ\n \"\"\"\n if not node.isleaf:\n children = node.children\n for ch in children:\n smooth_xpos(ch, n2coords)\n\n if node.parent:\n px = n2coords[node.parent].x\n cx = min([ n2coords[ch].x for ch in children ])\n n2coords[node].x = (px + cx)/2.0\n\ndef depth_length_preorder_traversal(node, n2coords=None, isroot=False):\n \"\"\"\n Calculate node depth (root = depth 0) and length to root\n\n Args:\n node (Node): A node object\n\n Returns:\n dict: Mapping of nodes to coordinate objects. 
Coordinate\n objects have attributes \"depth\" and \"length_to_root\"\n \"\"\"\n if n2coords is None:\n n2coords = {}\n coords = n2coords.get(node) or Coordinates()\n coords.node = node\n if (not node.parent) or isroot:\n coords.depth = 0\n coords.length_to_root = 0.0\n else:\n try:\n p = n2coords[node.parent]\n coords.depth = p.depth + 1\n coords.length_to_root = p.length_to_root + (node.length or 0.0)\n except KeyError:\n print(node.label, node.parent.label)\n except AttributeError:\n coords.depth = 0\n coords.length_to_root = 0\n n2coords[node] = coords\n\n for ch in node.children:\n depth_length_preorder_traversal(ch, n2coords, False)\n\n return n2coords\n\ndef calc_node_positions(node, width, height,\n lpad=0, rpad=0, tpad=0, bpad=0,\n scaled=True, smooth=True, n2coords=None):\n \"\"\"\n Calculate where nodes should be positioned in 2d space for drawing a tree\n\n Args:\n node (Node): A (root) node\n width (float): The width of the canvas\n height (float): The height of the canvas\n lpad, rpad, tpad, bpad (float): Padding on the edges of the canvas.\n Optional, defaults to 0.\n scaled (bool): Whether or not the tree is scaled. Optional, defaults to\n True.\n smooth (bool): Whether or not to smooth the tree. 
Optional, defaults to\n True.\n Returns:\n dict: Mapping of nodes to Coordinates object\n Notes:\n Origin is at upper left\n \"\"\"\n width -= (lpad + rpad)\n height -= (tpad + bpad)\n\n if n2coords is None:\n n2coords = {}\n depth_length_preorder_traversal(node, n2coords=n2coords)\n leaves = node.leaves()\n nleaves = len(leaves)\n maxdepth = max([ n2coords[lf].depth for lf in leaves ])\n unitwidth = width/float(maxdepth)\n unitheight = height/(nleaves-1.0)\n\n xoff = (unitwidth * 0.5)\n yoff = (unitheight * 0.5)\n\n if scaled:\n maxlen = max([ n2coords[lf].length_to_root for lf in leaves ])\n scale = width/maxlen\n\n for i, lf in enumerate(leaves):\n c = n2coords[lf]\n c.y = i * unitheight\n if not scaled:\n c.x = width\n else:\n c.x = c.length_to_root * scale\n\n for n in node.postiter():\n c = n2coords[n]\n if (not n.isleaf) and n.children:\n children = n.children\n ymax = n2coords[children[0]].y\n ymin = n2coords[children[-1]].y\n c.y = (ymax + ymin)/2.0\n if not scaled:\n c.x = min([ n2coords[ch].x for ch in children ]) - unitwidth\n else:\n c.x = c.length_to_root * scale\n\n if (not scaled) and smooth:\n for i in range(10):\n smooth_xpos(node, n2coords)\n\n for coords in n2coords.values():\n coords.x += lpad\n coords.y += tpad\n\n for n, coords in n2coords.items():\n if n.parent:\n p = n2coords[n.parent]\n coords.px = p.x; coords.py = p.y\n else:\n coords.px = None; coords.py = None\n\n return n2coords\n\nnodepos = calc_node_positions\n\ndef cartesian(node, xscale=1.0, leafspace=None, scaled=True, n2coords=None,\n smooth=0, array=numpy.array, ones=numpy.ones, yunit=None):\n \"\"\"\n RR: What is the difference between this function and calc_node_positions?\n Is it being used anywhere? 
-CZ\n \"\"\"\n\n if n2coords is None:\n n2coords = {}\n\n depth_length_preorder_traversal(node, n2coords, True)\n leaves = node.leaves()\n nleaves = len(leaves)\n\n # leafspace is a vector that should sum to nleaves\n if leafspace is None:\n try: leafspace = [ float(x.leafspace) for x in leaves ]\n except: leafspace = numpy.zeros((nleaves,))\n assert len(leafspace) == nleaves\n #leafspace = array(leafspace)/(sum(leafspace)/float(nleaves))\n\n maxdepth = max([ n2coords[lf].depth for lf in leaves ])\n depth = maxdepth * xscale\n #if not yunit: yunit = 1.0/nleaves\n yunit = 1.0\n\n if scaled:\n maxlen = max([ n2coords[lf].length_to_root for lf in leaves ])\n depth = maxlen\n\n y = 0\n for i, lf in enumerate(leaves):\n c = n2coords[lf]\n yoff = 1 + (leafspace[i] * yunit)\n c.y = y + yoff*0.5\n y += yoff\n if not scaled:\n c.x = depth\n else:\n c.x = c.length_to_root\n\n for n in node.postiter():\n c = n2coords[n]\n if not n.isleaf:\n children = n.children\n v = [n2coords[children[0]].y, n2coords[children[-1]].y]\n v.sort()\n ymin, ymax = v\n c.y = (ymax + ymin)/2.0\n if not scaled:\n c.x = min([ n2coords[ch].x for ch in children ]) - 1.0\n else:\n c.x = c.length_to_root\n\n if not scaled:\n for i in range(smooth):\n smooth_xpos(node, n2coords)\n\n return n2coords\n\nif __name__ == \"__main__\":\n import tree\n node = tree.read(\"(a:3,(b:2,(c:4,d:5):1,(e:3,(f:1,g:1):2):2):2);\")\n for i, n in enumerate(node.iternodes()):\n if not n.isleaf:\n n.label = \"node%s\" % i\n node.label = \"root\"\n n2c = calc_node_positions(node, width=10, height=10, scaled=True)\n\n from pprint import pprint\n pprint(n2c)\n", "ivy/ltt.py": "\"\"\"\nCompute lineages through time\n\"\"\"\nimport numpy\n\n# RR: Should results be set to None and then defined in the function to avoid\n# problems with mutable defaults in functions? 
-CZ\ndef traverse(node, t=0, results=[]):\n \"\"\"\n Recursively traverse the tree and collect information about when\n nodes split and how many lineages are added by its splitting.\n \"\"\"\n if node.children:\n ## if not node.label:\n ## node.label = str(node.id)\n results.append((t, len(node.children)-1))\n for child in node.children:\n traverse(child, t+child.length, results)\n return results\n\ndef ltt(node):\n \"\"\"\n Calculate lineages through time. The tree is assumed to be an\n ultrametric chronogram (extant leaves, with branch lengths\n proportional to time).\n\n Args:\n node (Node): A node object. All nodes should have branch lengths.\n\n Returns:\n tuple: (times, diversity) - 1D-arrays containing the results.\n \"\"\"\n v = traverse(node) # v is a list of (time, diversity) values\n v.sort()\n # for plotting, it is easiest if x and y values are in separate\n # sequences, so we create a transposed array from v\n times, diversity = numpy.array(v).transpose()\n return times, diversity.cumsum()\n\ndef test():\n import newick, ascii\n n = newick.parse(\"(((a:1,b:2):3,(c:3,d:1):1,(e:0.5,f:3):2.5):1,g:4);\")\n v = ltt(n)\n print(ascii.render(n, scaled=1))\n for t, n in v:\n print(t, n)\n\nif __name__ == \"__main__\":\n test()\n", "ivy/newick.py": "\"\"\"\nParse newick strings.\n\nThe function of interest is `parse`, which returns the root node of\nthe parsed tree.\n\"\"\"\nfrom __future__ import print_function, absolute_import, division, unicode_literals\nimport string, sys, re, shlex, itertools\ntry:\n from cStringIO import StringIO\nexcept:\n from io import StringIO\n\n## def read(s):\n## try:\n## s = file(s).read()\n## except:\n## try:\n## s = s.read()\n## except:\n## pass\n## return parse(s)\n\nLABELCHARS = '-.|/?#&'\nMETA = re.compile(r'([^,=\\s]+)\\s*=\\s*(\\{[^=}]*\\}|\"[^\"]*\"|[^,]+)?')\n\ndef add_label_chars(chars):\n global LABELCHARS\n LABELCHARS += chars\n\nclass Error(Exception):\n pass\n\nclass Tokenizer(shlex.shlex):\n \"\"\"Provides tokens for 
parsing newick strings.\"\"\"\n def __init__(self, infile):\n global LABELCHARS\n shlex.shlex.__init__(self, infile, posix=False)\n self.commenters = ''\n self.wordchars = self.wordchars+LABELCHARS\n self.quotes = \"'\"\n\n def parse_embedded_comment(self):\n ws = self.whitespace\n self.whitespace = \"\"\n v = []\n while 1:\n token = self.get_token()\n if token == '':\n sys.stdout.write('EOF encountered mid-comment!\\n')\n break\n elif token == ']':\n break\n elif token == '[':\n self.parse_embedded_comment()\n else:\n v.append(token)\n self.whitespace = ws\n return \"\".join(v)\n ## print \"comment:\", v\n\ndef parse(data, ttable=None, treename=None):\n \"\"\"\n Parse a newick string.\n\n Args:\n data: Any file-like object that can be coerced into shlex, or\n a string (converted to StringIO)\n ttable (dict): Mapping of node labels in the newick string\n to other values.\n\n Returns:\n Node: The root node.\n \"\"\"\n from .tree import Node\n\n if isinstance(data, str):\n data = StringIO(data)\n\n start_pos = data.tell()\n tokens = Tokenizer(data)\n\n node = None; root = None\n lp=0; rp=0; rooted=1\n\n previous = None\n\n ni = 0 # node id counter (preorder) - zero-based indexing\n li = 0 # leaf index counter\n ii = 0 # internal node index counter\n pi = 0 # postorder sequence\n while 1:\n token = tokens.get_token()\n #print token,\n if token == ';' or token == tokens.eof:\n assert lp == rp, \\\n \"unbalanced parentheses in tree description: (%s, %s)\" \\\n % (lp, rp)\n break\n\n # internal node\n elif token == '(':\n lp = lp+1\n newnode = Node()\n newnode.ni = ni; ni += 1\n newnode.isleaf = False\n newnode.ii = ii; ii += 1\n newnode.treename = treename\n if node:\n if node.children: newnode.left = node.children[-1].right+1\n else: newnode.left = node.left+1\n node.add_child(newnode)\n else:\n newnode.left = 1; newnode.right = 2\n newnode.right = newnode.left+1\n node = newnode\n\n elif token == ')':\n rp = rp+1\n node = node.parent\n node.pi = pi; pi += 1\n if 
node.children:\n node.right = node.children[-1].right + 1\n\n elif token == ',':\n node = node.parent\n if node.children:\n node.right = node.children[-1].right + 1\n\n # branch length\n elif token == ':':\n token = tokens.get_token()\n if token == '[':\n node.length_comment = tokens.parse_embedded_comment()\n token = tokens.get_token()\n\n if not (token == ''):\n try: brlen = float(token)\n except ValueError as exc:\n raise ValueError(\n \"invalid literal for branch length, '{}'\".format(token))\n else:\n raise Error('unexpected end-of-file (expecting branch length)')\n\n node.length = brlen\n # comment\n elif token == '[':\n node.comment = tokens.parse_embedded_comment()\n if node.comment[0] == '&':\n # metadata\n meta = META.findall(node.comment[1:])\n if meta:\n for k, v in meta:\n v = eval(v.replace('{','(').replace('}',')'))\n node.meta[k] = v\n\n # leaf node or internal node label\n else:\n if previous != ')': # leaf node\n if ttable:\n try:\n ttoken = (ttable.get(int(token)) or\n ttable.get(token))\n except ValueError:\n ttoken = ttable.get(token)\n if ttoken:\n token = ttoken\n newnode = Node()\n newnode.ni = ni; ni += 1\n newnode.pi = pi; pi += 1\n newnode.label = \"_\".join(token.split()).replace(\"'\", \"\")\n newnode.isleaf = True\n newnode.li = li; li += 1\n if node.children: newnode.left = node.children[-1].right+1\n else: newnode.left = node.left+1\n newnode.right = newnode.left+1\n newnode.treename = treename\n node.add_child(newnode)\n node = newnode\n else: # label\n if ttable:\n node.label = ttable.get(token, token)\n else:\n node.label = token\n\n previous = token\n node.isroot = True\n return node\n\n## def string(node, length_fmt=\":%s\", end=True, newline=True):\n## \"Recursively create a newick string from node.\"\n## if not node.isleaf:\n## node_str = \"(%s)%s\" % \\\n## (\",\".join([ string(child, length_fmt, False, newline) \\\n## for child in node.children ]),\n## node.label or \"\"\n## )\n## else:\n## node_str = \"%s\" % 
node.label\n\n## if node.length is not None:\n## length_str = length_fmt % node.length\n## else:\n## length_str = \"\"\n\n## semicolon = \"\"\n## if end:\n## if not newline:\n## semicolon = \";\"\n## else:\n## semicolon = \";\\n\"\n## s = \"%s%s%s\" % (node_str, length_str, semicolon)\n## return s\n\n## def from_nexus(infile, bufsize=None):\n## bufsize = bufsize or 1024*5000\n## TTABLE = re.compile(r'\\btranslate\\s+([^;]+);', re.I | re.M)\n## TREE = re.compile(r'\\btree\\s+([_.\\w]+)\\s*=[^(]+(\\([^;]+;)', re.I | re.M)\n## s = infile.read(bufsize)\n## ttable = TTABLE.findall(s) or None\n## if ttable:\n## items = [ shlex.split(line) for line in ttable[0].split(\",\") ]\n## ttable = dict([ (k, v.replace(\" \", \"_\")) for k, v in items ])\n## trees = TREE.findall(s)\n## ## for i, t in enumerate(trees):\n## ## t = list(t)\n## ## if ttable:\n## ## t[1] = \"\".join(\n## ## [ ttable.get(x, \"_\".join(x.split()).replace(\"'\", \"\"))\n## ## for x in shlex.shlex(t[1]) ]\n## ## )\n## ## trees[i] = t\n## ## return trees\n## return ttable, trees\n\ndef parse_ampersand_comment(s):\n import pyparsing\n pyparsing.ParserElement.enablePackrat()\n from pyparsing import Word, Literal, QuotedString, CaselessKeyword, \\\n OneOrMore, Group, Optional, Suppress, Regex, Dict\n word = Word(string.ascii_letters+string.digits+\"%_\")\n key = word.setResultsName(\"key\") + Suppress(\"=\")\n single_value = (Word(string.ascii_letters+string.digits+\"-.\") |\n QuotedString(\"'\") |\n QuotedString('\"'))\n range_value = Group(Suppress(\"{\") +\n single_value.setResultsName(\"min\") +\n Suppress(\",\") +\n single_value.setResultsName(\"max\") +\n Suppress(\"}\"))\n pair = (key + (single_value | range_value).setResultsName(\"value\"))\n g = OneOrMore(pair)\n d = []\n for x in g.searchString(s):\n v = x.value\n if isinstance(v, str):\n try: v = float(v)\n except ValueError: pass\n else:\n try: v = map(float, v.asList())\n except ValueError: pass\n d.append((x.key, v))\n return d\n\n# def 
test_parse_comment():\n# v = ((\"height_median=1.1368683772161603E-13,height=9.188229043880098E-14,\"\n# \"height_95%_HPD={5.6843418860808015E-14,1.7053025658242404E-13},\"\n# \"height_range={0.0,2.8421709430404007E-13}\"),\n# \"R\", \"lnP=-154.27154502342688,lnP=-24657.14341301901\",\n# 'states=\"T-lateral\"')\n# for s in v:\n# print \"input:\", s\n# print dict(parse_ampersand_comment(s))\n", "ivy/nexus.py": "from __future__ import print_function\nimport itertools\nfrom . import newick\n\nclass Newick(object):\n \"\"\"\n convenience class for storing the results of a newick tree\n record from a nexus file, as parsed by newick.nexus_iter\n \"\"\"\n def __init__(self, parse_results=None, ttable={}):\n self.name = \"\"\n self.comment = \"\"\n self.root_comment = \"\"\n self.newick = \"\"\n self.ttable = ttable\n if parse_results: self.populate(parse_results)\n\n def populate(self, parse_results, ttable={}):\n self.name = parse_results.tree_name\n self.comment = parse_results.tree_comment\n self.root_comment = parse_results.root_comment\n self.newick = parse_results.newick\n if ttable: self.ttable = ttable\n\n def parse(self, newick=newick):\n assert self.newick\n self.root = newick.parse(\n self.newick, ttable=self.ttable, treename=self.name\n )\n return self.root\n\ndef fetchaln(fname):\n \"\"\"Fetch alignment\"\"\"\n from Bio.Nexus import Nexus\n n = Nexus.Nexus(fname)\n return n\n\ndef split_blocks(infile):\n try:\n from cStringIO import StringIO\n except:\n from io import StringIO\n dropwhile = itertools.dropwhile; takewhile = itertools.takewhile\n blocks = []\n not_begin = lambda s: not s.lower().startswith(\"begin\")\n not_end = lambda s: not s.strip().lower() in (\"end;\", \"endblock;\")\n while 1:\n f = takewhile(not_end, dropwhile(not_begin, infile))\n try:\n b = f.next().split()[-1][:-1]\n blocks.append((b, StringIO(\"\".join(list(f)))))\n except StopIteration:\n break\n return blocks\n\ndef parse_treesblock(infile):\n import string\n from pyparsing import 
Optional, Word, Regex, CaselessKeyword, Suppress\n from pyparsing import QuotedString\n comment = Optional(Suppress(\"[&\") + Regex(r'[^]]+') + Suppress(\"]\"))\n name = Word(alphanums+\"_\") | QuotedString(\"'\")\n newick = Regex(r'[^;]+;')\n tree = (CaselessKeyword(\"tree\").suppress() +\n Optional(\"*\").suppress() +\n name.setResultsName(\"tree_name\") +\n comment.setResultsName(\"tree_comment\") +\n Suppress(\"=\") +\n comment.setResultsName(\"root_comment\") +\n newick.setResultsName(\"newick\"))\n ## treesblock = Group(beginblock +\n ## Optional(ttable.setResultsName(\"ttable\")) +\n ## Group(OneOrMore(tree)) +\n ## endblock)\n\n def parse_ttable(f):\n ttable = {}\n while True:\n s = f.next().strip()\n if s.lower() == \";\":\n break\n if s[-1] in \",;\":\n s = s[:-1]\n k, v = s.split()\n ttable[k] = v\n if s[-1] == \";\":\n break\n return ttable\n\n ttable = {}\n while True:\n try:\n s = infile.next().strip()\n except StopIteration:\n break\n if s.lower() == \"translate\":\n ttable = parse_ttable(infile)\n # print(\"ttable: %s\" % len(ttable))\n else:\n match = tree.parseString(s)\n yield Newick(match, ttable)\n\ndef iter_trees(infile):\n import pyparsing\n pyparsing.ParserElement.enablePackrat()\n from pyparsing import (\n Word, Literal, QuotedString, CaselessKeyword, CharsNotIn,\n OneOrMore, Group, Optional, Suppress, Regex, Dict, ZeroOrMore,\n alphanums, nums)\n comment = Optional(Suppress(\"[&\") + Regex(r'[^]]+') + Suppress(\"]\"))\n name = Word(alphanums+\"_.\") | QuotedString(\"'\")\n newick = Regex(r'[^;]+;')\n tree = (CaselessKeyword(\"tree\").suppress() +\n Optional(\"*\").suppress() +\n name.setResultsName(\"tree_name\") +\n comment.setResultsName(\"tree_comment\") +\n Suppress(\"=\") +\n comment.setResultsName(\"root_comment\") +\n newick.setResultsName(\"newick\"))\n\n def not_begin(s):\n # print('not_begin', s)\n return s.strip().lower() != \"begin trees;\"\n def not_end(s):\n # print('not_end', s)\n return s.strip().lower() not in (\"end;\", 
\"endblock;\")\n def parse_ttable(f):\n ttable = {}\n # com = Suppress('[') + ZeroOrMore(CharsNotIn(']')) + Suppress(']')\n com = Suppress('[' + ZeroOrMore(CharsNotIn(']') + ']'))\n while True:\n s = next(f).strip()\n if not s:\n continue\n s = com.transformString(s).strip()\n if s.lower() == \";\":\n break\n b = False\n if s[-1] in \",;\":\n if s[-1] == ';':\n b = True\n s = s[:-1]\n # print(s)\n k, v = s.split()\n ttable[k] = v\n if b:\n break\n return ttable\n\n # read lines between \"begin trees;\" and \"end;\"\n f = itertools.takewhile(not_end, itertools.dropwhile(not_begin, infile))\n s = next(f).strip().lower()\n if s != \"begin trees;\":\n print(\"Expecting 'begin trees;', got %s\" % s, file=sys.stderr)\n raise StopIteration\n ttable = {}\n while True:\n try:\n s = next(f).strip()\n except StopIteration:\n break\n if not s:\n continue\n if s.lower() == \"translate\":\n ttable = parse_ttable(f)\n # print \"ttable: %s\" % len(ttable)\n elif s.split()[0].lower()=='tree':\n match = tree.parseString(s)\n yield Newick(match, ttable)\n", "ivy/sequtil.py": "try:\n from itertools import izip, imap\nexcept ImportError:\n izip = zip\n imap = map\nimport numpy\n\ndef finditer(seq, substr, start=0):\n \"\"\"\n Find substrings within a sequence\n\n Args:\n seq (str): A sequence.\n substr (str): A subsequence to search for\n start (int): Starting index. Defaults to 0\n Yields:\n int: Starting indicies of where the substr was found in seq\n \"\"\"\n N = len(substr)\n i = seq.find(substr, start)\n while i >= 0:\n yield i\n i = seq.find(substr, i+N)\n\ndef gapidx(seq, gapchar='-'):\n \"\"\"\n For a sequence with gaps, calculate site positions without gaps\n\n Args:\n seq (list): Each element of the list is one character in a sequence.\n gapchar (str): The character gaps are coded as. 
Defaults to '-'\n Returns:\n array: An array where the first element corresponds to range(number of\n characters that are not gaps) and the second element is the indicies\n of all characters that are not gaps.\n \"\"\"\n a = numpy.array(seq)\n idx = numpy.arange(len(a))\n nongap = idx[a != gapchar]\n return numpy.array((numpy.arange(len(nongap)), nongap))\n\ndef find_stop_codons(seq, pos=0):\n \"\"\"\n Find stop codons within sequence (in reading frame)\n\n Args:\n seq (str): A sequence\n pos (int): Starting position. Defaults to 0.\n Yields:\n tuple: The index where the stop codon starts\n and which stop codon was found.\n \"\"\"\n s = seq[pos:]\n it = iter(s)\n g = imap(lambda x:\"\".join(x), izip(it, it, it))\n for i, x in enumerate(g):\n if x in (\"TAG\", \"TAA\", \"TGA\"):\n yield pos+(i*3), x\n", "ivy/storage.py": "from __future__ import print_function\nfrom operator import itemgetter\nfrom heapq import nlargest\nfrom itertools import repeat\ntry:\n from itertools import ifilter\nexcept:\n ifilter = filter\n\nclass Storage(dict):\n \"\"\"\n A Storage object is like a dictionary except `obj.foo` can be used\n in addition to `obj['foo']`.\n\n From web2py/gluon/storage.py by Massimo Di Pierro (www.web2py.com)\n \"\"\"\n def __getattr__(self, key):\n try:\n return self[key]\n except KeyError:\n return None\n\n def __setattr__(self, key, value):\n self[key] = value\n\n def __delattr__(self, key):\n del self[key]\n\n def __repr__(self):\n return ''\n\n def __getstate__(self):\n return dict(self)\n\n def __setstate__(self, value):\n for (k, v) in value.items():\n self[k] = v\n\nclass MaxDict(dict):\n def __setitem__(self, key, value):\n v = self.get(key)\n if value > v:\n dict.__setitem__(self, key, value)\n \n#from http://code.activestate.com/recipes/576611/\nclass Counter(dict):\n \"\"\"Dict subclass for counting hashable objects. Sometimes called a bag\n or multiset. 
Elements are stored as dictionary keys and their counts\n are stored as dictionary values.\n\n >>> Counter('zyzygy')\n Counter({'y': 3, 'z': 2, 'g': 1})\n\n \"\"\"\n\n def __init__(self, iterable=None, **kwds):\n \"\"\"Create a new, empty Counter object. And if given, count elements\n from an input iterable. Or, initialize the count from another mapping\n of elements to their counts.\n\n >>> c = Counter() # a new, empty counter\n >>> c = Counter('gallahad') # a new counter from an iterable\n >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping\n >>> c = Counter(a=4, b=2) # a new counter from keyword args\n\n \"\"\" \n self.update(iterable, **kwds)\n\n def __missing__(self, key):\n return 0\n\n def most_common(self, n=None):\n \"\"\"List the n most common elements and their counts from the most\n common to the least. If n is None, then list all element counts.\n\n >>> Counter('abracadabra').most_common(3)\n [('a', 5), ('r', 2), ('b', 2)]\n\n \"\"\" \n if n is None:\n return sorted(self.iteritems(), key=itemgetter(1), reverse=True)\n return nlargest(n, self.iteritems(), key=itemgetter(1))\n\n def elements(self):\n \"\"\"Iterator over elements repeating each as many times as its count.\n\n >>> c = Counter('ABCABC')\n >>> sorted(c.elements())\n ['A', 'A', 'B', 'B', 'C', 'C']\n\n If an element's count has been set to zero or is a negative number,\n elements() will ignore it.\n\n \"\"\"\n for elem, count in self.iteritems():\n for _ in repeat(None, count):\n yield elem\n\n # Override dict methods where the meaning changes for Counter objects.\n\n @classmethod\n def fromkeys(cls, iterable, v=None):\n raise NotImplementedError(\n 'Counter.fromkeys() is undefined. 
Use Counter(iterable) instead.')\n\n def update(self, iterable=None, **kwds):\n \"\"\"Like dict.update() but add counts instead of replacing them.\n\n Source can be an iterable, a dictionary, or another Counter instance.\n\n >>> c = Counter('which')\n >>> c.update('witch') # add elements from another iterable\n >>> d = Counter('watch')\n >>> c.update(d) # add elements from another counter\n >>> c['h'] # four 'h' in which, witch, and watch\n 4\n\n \"\"\" \n if iterable is not None:\n if hasattr(iterable, 'iteritems'):\n if self:\n self_get = self.get\n for elem, count in iterable.iteritems():\n self[elem] = self_get(elem, 0) + count\n else:\n dict.update(self, iterable) # fast path when counter is empty\n else:\n self_get = self.get\n for elem in iterable:\n self[elem] = self_get(elem, 0) + 1\n if kwds:\n self.update(kwds)\n\n def copy(self):\n 'Like dict.copy() but returns a Counter instance instead of a dict.'\n return Counter(self)\n\n def __delitem__(self, elem):\n 'Like dict.__delitem__() but does not raise KeyError for missing values.'\n if elem in self:\n dict.__delitem__(self, elem)\n\n def __repr__(self):\n if not self:\n return '%s()' % self.__class__.__name__\n items = ', '.join(map('%r: %r'.__mod__, self.most_common()))\n return '%s({%s})' % (self.__class__.__name__, items)\n\n # Multiset-style mathematical operations discussed in:\n # Knuth TAOCP Volume II section 4.6.3 exercise 19\n # and at http://en.wikipedia.org/wiki/Multiset\n #\n # Outputs guaranteed to only include positive counts.\n #\n # To strip negative and zero counts, add-in an empty counter:\n # c += Counter()\n\n def __add__(self, other):\n \"\"\"Add counts from two counters.\n\n >>> Counter('abbb') + Counter('bcc')\n Counter({'b': 4, 'c': 2, 'a': 1})\n\n\n \"\"\"\n if not isinstance(other, Counter):\n return NotImplemented\n result = Counter()\n for elem in set(self) | set(other):\n newcount = self[elem] + other[elem]\n if newcount > 0:\n result[elem] = newcount\n return result\n\n def 
__sub__(self, other):\n \"\"\" Subtract count, but keep only results with positive counts.\n\n >>> Counter('abbbc') - Counter('bccd')\n Counter({'b': 2, 'a': 1})\n\n \"\"\"\n if not isinstance(other, Counter):\n return NotImplemented\n result = Counter()\n for elem in set(self) | set(other):\n newcount = self[elem] - other[elem]\n if newcount > 0:\n result[elem] = newcount\n return result\n\n def __or__(self, other):\n \"\"\"Union is the maximum of value in either of the input counters.\n\n >>> Counter('abbb') | Counter('bcc')\n Counter({'b': 3, 'c': 2, 'a': 1})\n\n \"\"\"\n if not isinstance(other, Counter):\n return NotImplemented\n _max = max\n result = Counter()\n for elem in set(self) | set(other):\n newcount = _max(self[elem], other[elem])\n if newcount > 0:\n result[elem] = newcount\n return result\n\n def __and__(self, other):\n \"\"\" Intersection is the minimum of corresponding counts.\n\n >>> Counter('abbb') & Counter('bcc')\n Counter({'b': 1})\n\n \"\"\"\n if not isinstance(other, Counter):\n return NotImplemented\n _min = min\n result = Counter()\n if len(self) < len(other):\n self, other = other, self\n for elem in ifilter(self.__contains__, other):\n newcount = _min(self[elem], other[elem])\n if newcount > 0:\n result[elem] = newcount\n return result\n\ndef convert(d):\n \"convert a (potentially nested) dict to Storage\"\n from types import DictType\n t = type(d)\n if t == DictType:\n for k,v in d.items():\n d[k] = convert(v)\n return Storage(d)\n return d\n\nif __name__ == '__main__':\n import doctest\n print(doctest.testmod())\n", "ivy/tree.py": "\"\"\"\nThe Node class and functions for creating trees from Newick strings,\netc.\n\nivy does not have a Tree class per se, as most functions operate\ndirectly on Node objects.\n\"\"\"\nimport os\n# from storage import Storage\nfrom copy import copy as _copy\n# from matrix import vcv\nfrom . 
import newick, nexus\n# from itertools import izip_longest\n\n## class Tree(object):\n## \"\"\"\n## A simple Tree class.\n## \"\"\"\n## def __init__(self, data=None, format=\"newick\", name=None, ttable=None):\n## self.root = None\n## if data:\n## self.root = read(data, format, name, ttable)\n## self.name = name\n## self.ttable = ttable\n\n## def __getattribute__(self, a):\n## r = object.__getattribute__(self, 'root')\n## try:\n## return object.__getattribute__(r, a)\n## except AttributeError:\n## return object.__getattribute__(self, a)\n\ndef traverse(node):\n \"recursive preorder iterator based solely on .children attribute\"\n yield node\n for child in node.children:\n for descendant in traverse(child):\n yield descendant\n\nclass Node(object):\n \"\"\"\n A basic Node class with attributes and references to child nodes\n ('children', a list) and 'parent'.\n\n Keyword Args:\n id: ID of the node. If not provided, is set using\n builtin id function\n ni (int): Node index.\n li (int): Leaf index.\n isroot (bool): Is the node a root.\n isleaf (bool): Is the node a leaf.\n label (str): Node label.\n length (float): Branch length from node to parent\n support: RR: Are these bootstrap support values? -CZ\n age (float): Age of the node in time units.\n parent (Node): Parent of the ndoe.\n children (list): List of node objects. Children of node\n nchildren (int): No. 
of children\n left: RR: Unsure what left and right mean -CZ\n treename: Name of tree\n comment: Comments for tree\n\n \"\"\"\n def __init__(self, **kwargs):\n self.id = None\n self.ni = None # node index\n self.li = None # leaf index\n self.isroot = False\n self.isleaf = False\n self.label = None\n self.length = None\n self.support = None\n self.age = None\n self.parent = None\n self.children = []\n self.nchildren = 0\n self.left = None\n self.right = None\n self.treename = \"\"\n self.comment = \"\"\n self.meta = {}\n ## self.length_comment = \"\"\n ## self.label_comment = \"\"\n if kwargs:\n for k, v in kwargs.items():\n setattr(self, k, v)\n if self.id is None:\n self.id = id(self)\n\n def __copy__(self):\n return self.copy()\n\n def __repr__(self):\n v = []\n if self.isroot:\n v.append(\"root\")\n elif self.isleaf:\n v.append(\"leaf\")\n\n if self.label:\n v.append(\"'%s'\" % self.label)\n\n s = \", \".join(v)\n\n nid = self.ni if (self.ni is not None) else self.id\n\n if s:\n s = \"Node(%s, %s)\" % (nid, s)\n else:\n s = \"Node(%s)\" % nid\n return s\n\n\n def __contains__(self, other):\n \"\"\"\n For use with `in` keyword\n\n Args:\n other: Another node or node label.\n Returns:\n bool: Whether or not the other node is a descendant of self\n \"\"\"\n if other and isinstance(other, str):\n for x in self:\n if other == x.label:\n return True\n return False\n else:\n assert isinstance(self, Node)\n for x in self.iternodes():\n if other == x:\n return True\n return False\n\n def __iter__(self):\n for node in self.iternodes():\n yield node\n\n def __len__(self):\n \"\"\"\n Number of nodes descended from self\n\n Returns:\n int: Number of nodes descended from self (including self)\n \"\"\"\n i = 0\n for n in self:\n i += 1\n return i\n\n def __nonzero__(self):\n return True\n\n def __getitem__(self, x):\n \"\"\"\n Args:\n x: A Node, Node.id (int) or a Node.label (string)\n\n Returns:\n Node: Found node(s)\n\n \"\"\"\n for n in self:\n if n==x or n.id==x or n.ni == 
x or (n.label and n.label==x):\n return n\n raise IndexError(str(x))\n\n def ascii(self, *args, **kwargs):\n \"\"\"\n Create ascii tree.\n\n Keyword Args:\n unitlen (float): How long each unit should be rendered as.\n Defaults to 3.\n minwidth (float): Minimum width of the plot. Defaults to 50\n maxwidth (float): Maximum width of the plot. Defaults to None\n scaled (bool): Whether or not the tree is scaled. Defaults to False\n show_internal_labels (bool): Whether or not to show labels\n on internal nodes. Defaults to True.\n Returns:\n str: Ascii tree to be shown with print().\n \"\"\"\n from . import ascii as _ascii\n return _ascii.render(self, *args, **kwargs)\n\n def collapse(self, add=False):\n \"\"\"\n Remove self and collapse children to polytomy\n\n Args:\n add (bool): Whether or not to add self's length to children's\n length.\n\n Returns:\n Node: Parent of self\n\n \"\"\"\n assert self.parent\n p = self.prune()\n for c in self.children:\n p.add_child(c)\n if add and (c.length is not None):\n c.length += self.length\n self.children = []\n return p\n\n def copy(self, recurse=False):\n \"\"\"\n Return a copy of the node, but not copies of children, parent,\n or any attribute that is a Node.\n\n If `recurse` is True, recursively copy child nodes.\n\n Args:\n recurse (bool): Whether or not to copy children as well as self.\n\n Returns:\n Node: A copy of self.\n\n TODO: test this function.\n\n RR: This function runs rather slowly -CZ\n \"\"\"\n newnode = Node()\n for attr, value in self.__dict__.items():\n if (attr not in (\"children\", \"parent\") and\n not isinstance(value, Node)):\n setattr(newnode, attr, _copy(value))\n if recurse:\n newnode.children = [\n child.copy(True) for child in self.children\n ]\n return newnode\n\n def leafsets(self, d=None, labels=False):\n \"\"\"return a mapping of nodes to leaf sets (nodes or labels)\"\"\"\n d = d or {}\n if not self.isleaf:\n s = set()\n for child in self.children:\n if child.isleaf:\n if labels:\n 
s.add(child.label)\n else:\n s.add(child)\n else:\n d = child.leafsets(d, labels)\n s = s | d[child]\n d[self] = frozenset(s)\n return d\n\n def mrca(self, *nodes):\n \"\"\"\n Find most recent common ancestor of *nodes*\n\n Args:\n *nodes (Node): Node objects\n Returns:\n Node: The MRCA of *nodes*\n \"\"\"\n if len(nodes) == 1:\n nodes = tuple(nodes[0])\n if len(nodes) == 1:\n return nodes[0]\n nodes = set([ self[n] for n in nodes ])\n anc = []\n def f(n):\n seen = set()\n for c in n.children: seen.update(f(c))\n if n in nodes: seen.add(n)\n if seen == nodes and (not anc): anc.append(n)\n return seen\n f(self)\n return anc[0]\n\n ## def mrca(self, *nodes):\n ## \"\"\"\n ## Find most recent common ancestor of *nodes*\n ## \"\"\"\n ## if len(nodes) == 1:\n ## nodes = tuple(nodes[0])\n ## if len(nodes) == 1:\n ## return nodes[0]\n ## ## assert len(nodes) > 1, (\n ## ## \"Need more than one node for mrca(), got %s\" % nodes\n ## ## )\n ## def f(x):\n ## if isinstance(x, Node):\n ## return x\n ## elif type(x) in types.StringTypes:\n ## return self.find(x)\n ## nodes = map(f, nodes)\n ## assert all(filter(lambda x: isinstance(x, Node), nodes))\n\n ## #v = [ list(n.rootpath()) for n in nodes if n in self ]\n ## v = [ list(x) for x in izip_longest(*[ reversed(list(n.rootpath()))\n ## for n in nodes if n in self ]) ]\n ## if len(v) == 1:\n ## return v[0][0]\n ## anc = None\n ## for x in v:\n ## s = set(x)\n ## if len(s) == 1: anc = list(s)[0]\n ## else: break\n ## return anc\n\n def ismono(self, *leaves):\n \"\"\"\n Test if leaf descendants are monophyletic\n\n Args:\n *leaves (Node): At least two leaf Node objects\n\n Returns:\n bool: Are the leaf descendants monophyletic?\n\n RR: Should this function have a check to make sure the input nodes are\n leaves? 
There is some strange behavior if you input internal nodes -CZ\n \"\"\"\n if len(leaves) == 1:\n leaves = list(leaves)[0]\n assert len(leaves) > 1, (\n \"Need more than one leaf for ismono(), got %s\" % leaves\n )\n anc = self.mrca(leaves)\n if anc:\n return bool(len(anc.leaves())==len(leaves))\n\n def order_subtrees_by_size(self, n2s=None, recurse=False, reverse=False):\n \"\"\"\n Order interal clades by size\n\n \"\"\"\n if n2s is None:\n n2s = clade_sizes(self)\n if not self.isleaf:\n v = [ (n2s[c], c.label or '', c.id, c) for c in self.children ]\n v.sort()\n if reverse:\n v.reverse()\n self.children = [ x[-1] for x in v ]\n if recurse:\n for c in self.children:\n c.order_subtrees_by_size(n2s, recurse=True, reverse=reverse)\n\n def ladderize(self, reverse=False):\n self.order_subtrees_by_size(recurse=True, reverse=reverse)\n return self\n\n def add_child(self, child):\n \"\"\"\n Add child as child of self\n\n Args:\n child (Node): A node object\n\n \"\"\"\n assert child not in self.children\n self.children.append(child)\n child.parent = self\n child.isroot = False\n self.nchildren += 1\n\n def bisect_branch(self):\n \"\"\"\n Add new node as parent to self in the middle of branch to parent.\n\n Returns:\n Node: A new node.\n\n \"\"\"\n assert self.parent\n parent = self.prune()\n n = Node()\n if self.length:\n n.length = self.length/2.0\n self.length /= 2.0\n parent.add_child(n)\n n.add_child(self)\n return n\n\n def remove_child(self, child):\n \"\"\"\n Remove child.\n\n Args:\n child (Node): A node object that is a child of self\n\n \"\"\"\n assert child in self.children\n self.children.remove(child)\n child.parent = None\n self.nchildren -= 1\n if not self.children:\n self.isleaf = True\n\n def labeled(self):\n \"\"\"\n Return a list of all descendant nodes that are labeled\n\n Returns:\n list: All descendants of self that are labeled (including self)\n \"\"\"\n return [ n for n in self if n.label ]\n\n def leaves(self, f=None):\n \"\"\"\n Return a list of 
leaves. Can be filtered with f.\n\n Args:\n f (function): A function that evaluates to True if called with desired\n node as the first input\n\n Returns:\n list: A list of leaves that are true for f (if f is given)\n\n \"\"\"\n if f: return [ n for n in self if (n.isleaf and f(n)) ]\n return [ n for n in self if n.isleaf ]\n\n def internals(self, f=None):\n \"\"\"\n Return a list nodes that have children (internal nodes)\n\n Args:\n f (function): A function that evaluates to true if called with desired\n node as the first input\n\n Returns:\n list: A list of internal nodes that are true for f (if f is given)\n\n \"\"\"\n if f: return [ n for n in self if (n.children and f(n)) ]\n return [ n for n in self if n.children ]\n\n def clades(self):\n \"\"\"\n Get internal nodes descended from self\n\n Returns:\n list: A list of internal nodes descended from (and not including) self.\n\n \"\"\"\n return [ n for n in self if (n is not self) and not n.isleaf ]\n\n def iternodes(self, f=None):\n \"\"\"\n Return a generator of nodes descendant from self - including self\n\n Args:\n f (function): A function that evaluates to true if called with\n desired node as the first input\n\n Yields:\n Node: Nodes descended from self (including self) in\n preorder sequence\n\n \"\"\"\n if (f and f(self)) or (not f):\n yield self\n for child in self.children:\n for n in child.iternodes(f):\n yield n\n\n def iterleaves(self):\n \"\"\"\n Yield leaves descendant from self\n \"\"\"\n return self.iternodes(lambda x:x.isleaf)\n\n def preiter(self, f=None):\n \"\"\"\n Yield nodes in preorder sequence\n \"\"\"\n for n in self.iternodes(f=f):\n yield n\n\n def postiter(self, f=None):\n \"\"\"\n Yield nodes in postorder sequence\n \"\"\"\n if not self.isleaf:\n for child in self.children:\n for n in child.postiter():\n if (f and f(n)) or (not f):\n yield n\n if (f and f(self)) or (not f):\n yield self\n\n def descendants(self, order=\"pre\", v=None, f=None):\n \"\"\"\n Return a list of nodes 
descendant from self - but _not_\n including self!\n\n Args:\n order (str): Indicates wether to return nodes in preorder or\n postorder sequence. Optional, defaults to \"pre\"\n f (function): filtering function that evaluates to True if desired\n node is called as the first parameter.\n\n Returns:\n list: A list of nodes descended from self not including self.\n\n \"\"\"\n v = v or []\n for child in self.children:\n if (f and f(child)) or (not f):\n if order == \"pre\":\n v.append(child)\n else:\n v.insert(0, child)\n if child.children:\n child.descendants(order, v, f)\n return v\n\n def get(self, f, *args, **kwargs):\n \"\"\"\n Return the first node found by node.find()\n\n Args:\n f (function): A function that evaluates to True if desired\n node is called as the first parameter.\n Returns:\n Node: The first node found by node.find()\n\n \"\"\"\n v = self.find(f, *args, **kwargs)\n try:\n return v.next()\n except StopIteration:\n return None\n\n def grep(self, s, ignorecase=True):\n \"\"\"\n Find nodes by regular-expression search of labels\n\n Args:\n s (str): String to search.\n ignorecase (bool): Indicates to ignore case. Defaults to true.\n\n Returns:\n lsit: A list of node objects whose labels were matched by s.\n\n \"\"\"\n import re\n if ignorecase:\n pattern = re.compile(s, re.IGNORECASE)\n else:\n pattern = re.compile(s)\n\n search = pattern.search\n return [ x for x in self if x.label and search(x.label) ]\n\n def lgrep(self, s, ignorecase=True):\n \"\"\"\n Find leaves by regular-expression search of labels\n\n Args:\n s (str): String to search.\n ignorecase (bool): Indicates to ignore case. 
Defaults to true.\n\n Returns:\n lsit: A list of node objects whose labels were matched by s.\n\n \"\"\"\n return [ x for x in self.grep(s, ignorecase=ignorecase) if x.isleaf ]\n\n def bgrep(self, s, ignorecase=True):\n \"\"\"\n Find branches (internal nodes) by regular-expression search of\n labels\n\n Args:\n s (str): String to search.\n ignorecase (bool): Indicates to ignore case. Defaults to true.\n\n Returns:\n lsit: A list of node objects whose labels were matched by s.\n\n \"\"\"\n return [ x for x in self.grep(s, ignorecase=ignorecase) if\n (not x.isleaf) ]\n\n def find(self, f, *args, **kwargs):\n \"\"\"\n Find descendant nodes.\n\n Args:\n f: Function or a string. If a string, it is converted to a\n function for finding *f* as a substring in node labels.\n Otherwise, *f* should evaluate to True if called with a desired\n node as the first parameter.\n\n Yields:\n Node: Found nodes in preorder sequence.\n\n \"\"\"\n if not f:\n return\n if isinstance(f, str):\n func = lambda x: (f or None) in (x.label or \"\")\n else:\n func = f\n for n in self.iternodes():\n if func(n, *args, **kwargs):\n yield n\n\n def findall(self, f, *args, **kwargs):\n \"\"\"Return a list of found nodes.\"\"\"\n return list(self.find(f, *args, **kwargs))\n\n def prune(self):\n \"\"\"\n Remove self if self is not root.\n\n Returns:\n Node: Parent of self. 
If parent had only two children,\n parent is now a 'knee' and can be removed with excise.\n\n \"\"\"\n p = self.parent\n if p:\n p.remove_child(self)\n return p\n\n def excise(self):\n \"\"\"\n For 'knees': remove self from between parent and single child\n \"\"\"\n assert self.parent\n assert len(self.children)==1\n p = self.parent\n c = self.children[0]\n if c.length is not None and self.length is not None:\n c.length += self.length\n c.prune()\n self.prune()\n p.add_child(c)\n return p\n\n def graft(self, node):\n \"\"\"\n Add node as sister to self.\n \"\"\"\n parent = self.parent\n parent.remove_child(self)\n n = Node()\n n.add_child(self)\n n.add_child(node)\n parent.add_child(n)\n\n ## def leaf_distances(self, store=None, measure=\"length\"):\n ## \"\"\"\n ## for each internal node, calculate the distance to each leaf,\n ## measured in branch length or internodes\n ## \"\"\"\n ## if store is None:\n ## store = {}\n ## leaf2len = {}\n ## if self.children:\n ## for child in self.children:\n ## if measure == \"length\":\n ## dist = child.length\n ## elif measure == \"nodes\":\n ## dist = 1\n ## child.leaf_distances(store, measure)\n ## if child.isleaf:\n ## leaf2len[child] = dist\n ## else:\n ## for k, v in store[child].items():\n ## leaf2len[k] = v + dist\n ## else:\n ## leaf2len[self] = {self: 0}\n ## store[self] = leaf2len\n ## return store\n\n def leaf_distances(self, measure=\"length\"):\n \"\"\"\n RR: I don't quite understand the structure of the output. 
Also,\n I can't figure out what \"measure\" does.-CZ\n \"\"\"\n from collections import defaultdict\n store = defaultdict(lambda:defaultdict(lambda:0))\n nodes = [ x for x in self if x.children ]\n for lf in self.leaves():\n x = lf.length\n for n in lf.rootpath(self):\n store[n][lf] = x\n x += (n.length or 0)\n return store\n\n def rootpath(self, end=None, stop=None):\n \"\"\"\n Iterate over parent nodes toward the root, or node *end* if\n encountered.\n\n Args:\n end (Node): A Node object to iterate to (instead of iterating\n towards root). Optional, defaults to None\n stop (function): A function that returns True if desired node is called\n as the first parameter. Optional, defaults to None\n\n Yields:\n Node: Nodes in path to root (or end).\n\n \"\"\"\n n = self.parent\n while 1:\n if n is None: raise StopIteration\n yield n\n if n.isroot or (end and n == end) or (stop and stop(n)):\n raise StopIteration\n n = n.parent\n\n def rootpath_length(self, end=None):\n \"\"\"\n Get length from self to root(if end is None) or length\n from self to an ancestor node (if end is an ancestor to self)\n\n Args:\n end (Node): A node object\n\n Returns:\n float: The length from self to root/end\n\n \"\"\"\n n = self\n x = 0.0\n while n.parent:\n x += n.length\n if n.parent == end:\n break\n n = n.parent\n return x\n ## f = lambda x:x.parent==end\n ## v = [self.length]+[ x.length for x in self.rootpath(stop=f)\n ## if x.parent ]\n ## assert None not in v\n ## return sum(v)\n\n def max_tippath(self, first=True):\n \"\"\"\n Get the maximum length from self to a leaf node\n \"\"\"\n v = 0\n if self.children:\n v = max([ c.max_tippath(False) for c in self.children ])\n if not first:\n if self.length is None: v += 1\n else: v += self.length\n return v\n\n def subtree_mapping(self, labels, clean=False):\n \"\"\"\n Find the set of nodes in 'labels', and create a new tree\n representing the subtree connecting them. 
Nodes are assumed\n to be non-nested.\n\n Returns:\n dict: a mapping of old nodes to new nodes and vice versa.\n\n TODO: test this, high bug probability\n \"\"\"\n d = {}\n oldtips = [ x for x in self.leaves() if x.label in labels ]\n for tip in oldtips:\n path = list(tip.rootpath())\n for node in path:\n if node not in d:\n newnode = Node()\n newnode.isleaf = node.isleaf\n newnode.length = node.length\n newnode.label = node.label\n d[node] = newnode\n d[newnode] = node\n else:\n newnode = d[node]\n\n for child in node.children:\n if child in d:\n newchild = d[child]\n if newchild not in newnode.children:\n newnode.add_child(newchild)\n d[\"oldroot\"] = self\n d[\"newroot\"] = d[self]\n if clean:\n n = d[\"newroot\"]\n while 1:\n if n.nchildren == 1:\n oldnode = d[n]\n del d[oldnode]; del d[n]\n child = n.children[0]\n child.parent = None\n child.isroot = True\n d[\"newroot\"] = child\n d[\"oldroot\"] = d[child]\n n = child\n else:\n break\n\n for tip in oldtips:\n newnode = d[tip]\n while 1:\n newnode = newnode.parent\n oldnode = d[newnode]\n if newnode.nchildren == 1:\n child = newnode.children[0]\n if newnode.length:\n child.length += newnode.length\n newnode.remove_child(child)\n if newnode.parent:\n parent = newnode.parent\n parent.remove_child(newnode)\n parent.add_child(child)\n del d[oldnode]; del d[newnode]\n if not newnode.parent:\n break\n\n return d\n\n def reroot_orig(self, newroot):\n assert newroot in self\n self.isroot = False\n newroot.isroot = True\n v = []\n n = newroot\n while 1:\n v.append(n)\n if not n.parent: break\n n = n.parent\n v.reverse()\n for i, cp in enumerate(v[:-1]):\n node = v[i+1]\n # node is current node; cp is current parent\n cp.remove_child(node)\n node.add_child(cp)\n cp.length = node.length\n return newroot\n\n def reroot(self, newroot):\n \"\"\"\n RR: I can't get this to work properly -CZ\n \"\"\"\n newroot = self[newroot]\n assert newroot in self\n self.isroot = False\n n = newroot\n v = list(n.rootpath())\n v.reverse()\n 
for node in (v+[n])[1:]:\n # node is current node; cp is current parent\n cp = node.parent\n cp.remove_child(node)\n node.add_child(cp)\n cp.length = node.length\n cp.label = node.label\n newroot.isroot = True\n return newroot\n\n def makeroot(self, shift_labels=False):\n \"\"\"\n shift_labels: flag to shift internal parent-child node labels\n when internode polarity changes; suitable e.g. if internal node\n labels indicate unrooted bipartition support\n \"\"\"\n v = list(self.rootpath())\n v[-1].isroot = False\n v.reverse()\n for node in v[1:] + [self]:\n # node is current node; cp is current parent\n cp = node.parent\n cp.remove_child(node)\n node.add_child(cp)\n cp.length = node.length\n if shift_labels:\n cp.label = node.label\n self.isroot = True\n return self\n\n def write(self, outfile=None, format=\"newick\", length_fmt=\":%g\", end=True,\n clobber=False):\n if format==\"newick\":\n s = write_newick(self, outfile, length_fmt, True, clobber)\n if not outfile:\n return s\n\n\nreroot = Node.reroot\n\ndef index(node, n=0, d=0):\n \"\"\"\n recursively attach 'next', 'back', (and 'left', 'right'), 'ni',\n 'ii', 'pi', and 'node_depth' attributes to nodes\n \"\"\"\n node.next = node.left = n\n if not node.parent:\n node.node_depth = d\n else:\n node.node_depth = node.parent.node_depth + 1\n n += 1\n for i, c in enumerate(node.children):\n if i > 0:\n n = node.children[i-1].back + 1\n index(c, n)\n\n if node.children:\n node.back = node.right = node.children[-1].back + 1\n else:\n node.back = node.right = n\n return node.back\n\ndef remove_singletons(root, add=True):\n \"Remove descendant nodes that are the sole child of their parent\"\n for leaf in root.leaves():\n for n in leaf.rootpath():\n if n.parent and len(n.parent.children)==1:\n n.collapse(add)\n\ndef cls(root):\n \"\"\"\n Get clade sizes of whole tree\n Args:\n * root: A root node\n\n Returns:\n * A dict mapping nodes to clade sizes\n\n \"\"\"\n results = {}\n for node in root.postiter():\n if 
node.isleaf:\n results[node] = 1\n else:\n results[node] = sum(results[child] for child in node.children)\n return results\n\ndef clade_sizes(node, results={}):\n \"\"\"Map node and descendants to number of descendant tips\"\"\"\n size = int(node.isleaf)\n if not node.isleaf:\n for child in node.children:\n clade_sizes(child, results)\n size += results[child]\n results[node] = size\n return results\n\ndef write(node, outfile=None, format=\"newick\", length_fmt=\":%g\",\n clobber=False):\n if format==\"newick\" or (isinstance(outfile, str) and\n outfile.endswith(\".newick\") or\n outfile.endswith(\".new\")):\n s = write_newick(node, outfile, length_fmt, True, clobber)\n if not outfile:\n return s\n\ndef write_newick(node, outfile=None, length_fmt=\":%g\", end=False,\n clobber=False):\n if not node.isleaf:\n node_str = \"(%s)%s\" % \\\n (\",\".join([ write_newick(child, outfile, length_fmt,\n False, clobber)\n for child in node.children ]),\n (node.label or \"\")\n )\n else:\n node_str = \"%s\" % node.label\n\n if node.length is not None:\n length_str = length_fmt % node.length\n else:\n length_str = \"\"\n\n semicolon = \"\"\n if end:\n semicolon = \";\"\n s = \"%s%s%s\" % (node_str, length_str, semicolon)\n if end and outfile:\n flag = False\n if isinstance(outfile, str):\n if not clobber:\n assert not os.path.isfile(outfile), \"File '%s' exists! (Set clobber=True to overwrite)\" % outfile\n flag = True\n outfile = open(outfile, \"w\")\n outfile.write(s)\n if flag:\n outfile.close()\n return s\n\ndef read(data, format=None, treename=None, ttable=None):\n \"\"\"\n Read a single tree from *data*, which can be a Newick string, a\n file name, or a file-like object with `tell` and 'read`\n methods. 
*treename* is an optional string that will be attached to\n all created nodes.\n\n Args:\n data: A file or file-like object or newick string\n\n Returns:\n Node: The root node.\n \"\"\"\n\n def strip(s):\n fname = os.path.split(s)[-1]\n head, tail = os.path.splitext(fname)\n tail = tail.lower()\n if tail in (\".nwk\", \".tre\", \".tree\", \".newick\", \".nex\"):\n return head\n else:\n return fname\n\n if (not format):\n if isinstance(data, str) and os.path.isfile(data):\n s = data.lower()\n for tail in \".nex\", \".nexus\", \".tre\":\n if s.endswith(tail):\n format=\"nexus\"\n break\n\n if (not format):\n format = \"newick\"\n\n if format == \"newick\":\n if isinstance(data, str):\n if os.path.isfile(data):\n treename = strip(data)\n return newick.parse(open(data), treename=treename,\n ttable=ttable)\n else:\n return newick.parse(data, ttable=ttable)\n\n elif (hasattr(data, \"tell\") and hasattr(data, \"read\")):\n treename = strip(getattr(data, \"name\", None))\n return newick.parse(data, treename=treename, ttable=ttable)\n elif format == \"nexus-dendropy\":\n import dendropy\n if isinstance(data, str):\n if os.path.isfile(data):\n treename = strip(data)\n return newick.parse(\n str(dendropy.Tree.get_from_path(data, \"nexus\")),\n treename=treename\n )\n else:\n return newick.parse(\n str(dendropy.Tree.get_from_string(data, \"nexus\"))\n )\n\n elif (hasattr(data, \"tell\") and hasattr(data, \"read\")):\n treename = strip(getattr(data, \"name\", None))\n return newick.parse(\n str(dendropy.Tree.get_from_stream(data, \"nexus\")),\n treename=treename\n )\n else:\n pass\n\n elif format == \"nexus\":\n if isinstance(data, str):\n if os.path.isfile(data):\n with open(data) as infile:\n nexiter = nexus.iter_trees(infile)\n rec = next(nexiter)\n if rec:\n return rec.parse()\n else:\n nexiter = nexus.iter_trees(StringIO(data))\n else:\n nexiter = nexus.iter_trees(data)\n rec = next(nexiter)\n if rec:\n return rec.parse()\n else:\n # implement other tree formats here 
(nexus, nexml etc.)\n raise IOError(\"format '%s' not implemented yet\" % format)\n\n raise IOError(\"unable to read tree from '%s'\" % data)\n\ndef readmany(data, format=\"newick\"):\n \"\"\"Iterate over trees from a source.\"\"\"\n if isinstance(data, str):\n if os.path.isfile(data):\n data = open(data)\n else:\n data = StringIO(data)\n\n if format == \"newick\":\n for line in data:\n yield newick.parse(line)\n elif format == \"nexus\":\n for rec in newick.nexus_iter(data):\n yield rec.parse()\n else:\n raise Exception(\"format '%s' not recognized\" % format)\n data.close()\n\n## def randomly_resolve(n):\n## assert len(n.children)>2\n\n## def leaf_mrcas(root):\n## from itertools import product, izip, tee\n## from collections import OrderedDict\n## from numpy import empty\n## mrca = OrderedDict()\n## def pairwise(iterable, tee=tee, izip=izip):\n## a, b = tee(iterable)\n## next(b, None)\n## return izip(a, b)\n## def f(n):\n## if n.isleaf:\n## od = OrderedDict(); od[n] = n.length\n## return od\n## d = [ f(c) for c in n.children ]\n## for i, j in pairwise(xrange(len(d))):\n## di = d[i]; dj =d[j]\n## for ni, niv in di.iteritems():\n## for nj, njv in dj.iteritems():\n## mrca[(ni,nj)] = n\n## d[j].update(di)\n## return d[j]\n## f(root)\n## return mrca\n\ndef C(leaves, internals):\n from scipy.sparse import lil_matrix\n m = lil_matrix((len(internals), len(leaves)))\n for lf in leaves:\n v = lf.length if lf.length is not None else 1\n for n in lf.rootpath():\n m[n.ii,lf.li] = v\n v += n.length if n.length is not None else 1\n return m.tocsc()\n", "ivy/treebase.py": "\"\"\"\nFunctions to get trees and character data from treebase\n\"\"\"\n\ntry:\n from urllib2 import urlopen\nexcept ImportError:\n from urllib.request import urlopen\nfrom lxml import etree\nfrom . 
import storage\nimport sys, re\n\n# \"http://purl.org/phylo/treebase/phylows/study/TB2:S11152\"\n\nTREEBASE_WEBSERVICE = \"http://purl.org/phylo/treebase/phylows\"\nNEXML_NAMESPACE = \"http://www.nexml.org/2009\"\nNEXML = \"{%s}\" % NEXML_NAMESPACE\nUNIPROT = \"http://purl.uniprot.org/taxonomy/\"\nNAMEBANK = (\"http://www.ubio.org/authority/metadata.php?\"\n \"lsid=urn:lsid:ubio.org:namebank:\")\n\nROW_SEGMENTS = (\"http://treebase.org/treebase-web/search/study/\"\n \"rowSegmentsTSV.html?matrixid=\")\n\nMETA_DATATYPE = {\n \"xsd:long\": int,\n \"xsd:integer\": int,\n \"xsd:string\": str\n }\n\nAMBIG_RE = re.compile(r'([{][a-zA-Z]+[}])')\n\ndef fetch_study(study_id, format=\"nexml\"):\n \"\"\"\n Get a study from treebase in one of various formats\n\n Args:\n study_id (str): The id of the study\n format (str): One of [\"rdf\", \"html\", \"nexml\", \"nexus\"]\n Returns:\n Str representing a nexus file (if format = \"nexus\")\n\n OR\n\n An lxml etree object\n \"\"\"\n try: study_id = \"S%s\" % int(study_id)\n except ValueError: pass\n\n # format is one of [\"rdf\", \"html\", \"nexml\", \"nexus\"]\n url = \"%s/study/TB2:%s?format=%s\" % (TREEBASE_WEBSERVICE, study_id, format)\n if format==\"nexus\":\n return urlopen(url).read()\n else:\n return etree.parse(url)\n\ndef parse_chars(e, otus):\n v = []\n for chars in e.findall(NEXML+\"characters\"):\n c = storage.Storage(chars.attrib)\n c.states = parse_states(chars)\n c.meta = storage.Storage()\n for meta in chars.findall(NEXML+\"meta\"):\n a = meta.attrib\n if a.get(\"content\"):\n value = META_DATATYPE[a[\"datatype\"]](a[\"content\"])\n c.meta[a[\"property\"]] = value\n c.matrices = []\n for matrix in chars.findall(NEXML+\"matrix\"):\n m = storage.Storage()\n m.rows = []\n for row in matrix.findall(NEXML+\"row\"):\n r = storage.Storage(row.attrib)\n r.otu = otus[r.otu]\n s = row.findall(NEXML+\"seq\")[0].text\n substrs = []\n for ss in AMBIG_RE.split(s):\n if ss.startswith(\"{\"):\n key = frozenset(ss[1:-1])\n val = 
c.states.states2symb.get(key)\n if key and not val:\n sys.stderr.write(\"missing ambig symbol for %s\\n\" %\n \"\".join(sorted(key)))\n ss = val or \"?\"\n substrs.append(ss)\n s = \"\".join(substrs)\n r.seq = s\n m.rows.append(r)\n c.matrices.append(m)\n v.append(c)\n return v\n\ndef parse_trees(e, otus):\n \"\"\"\n Get trees from an etree object\n\n Args:\n e: A nexml document parsed by etree\n otus: OTUs returned by parse_otus\n Returns:\n list: A list of ivy Storage objects each\n containing every node of a tree.\n \"\"\"\n from tree import Node\n v = []\n for tb in e.findall(NEXML+\"trees\"):\n for te in tb.findall(NEXML+\"tree\"):\n t = storage.Storage()\n t.attrib = storage.Storage(te.attrib)\n t.nodes = {}\n for n in te.findall(NEXML+\"node\"):\n node = Node()\n if n.attrib.get(\"otu\"):\n node.isleaf = True\n node.otu = otus[n.attrib[\"otu\"]]\n node.label = node.otu.label\n t.nodes[n.attrib[\"id\"]] = node\n for edge in te.findall(NEXML+\"edge\"):\n d = edge.attrib\n n = t.nodes[d[\"target\"]]\n p = t.nodes[d[\"source\"]]\n length = d.get(\"length\")\n if length:\n n.length = float(length)\n p.add_child(n)\n r = [ n for n in t.nodes.values() if not n.parent ]\n assert len(r)==1\n r = r[0]\n r.isroot = True\n for i, n in enumerate(r): n.id = i+1\n t.root = r\n v.append(t)\n return v\n\ndef parse_otus(e):\n \"\"\"\n Get OTUs from an etree object\n\n Args:\n e: A nexml document parsed by etree\n Returns:\n dict: A dict mapping keys to OTUs contained in ivy Storage objects\n \"\"\"\n v = {}\n for otus in e.findall(NEXML+\"otus\"):\n for x in otus.findall(NEXML+\"otu\"):\n otu = storage.Storage()\n otu.id = x.attrib[\"id\"]\n otu.label = x.attrib[\"label\"]\n for meta in x.iterchildren():\n d = meta.attrib\n p = d.get(\"property\")\n if p and p == \"tb:identifier.taxon\":\n otu.tb_taxid = d[\"content\"]\n elif p and p == \"tb:identifier.taxonVariant\":\n otu.tb_taxid_variant = d[\"content\"]\n h = d.get(\"href\")\n if h and h.startswith(NAMEBANK):\n 
otu.namebank_id = int(h.replace(NAMEBANK, \"\"))\n elif h and h.startswith(UNIPROT):\n otu.ncbi_taxid = int(h.replace(UNIPROT, \"\"))\n v[otu.id] = otu\n return v\n\ndef parse_nexml(doc):\n \"\"\"\n Parse an etree ElementTree\n\n Args:\n doc: An etree ElementTree or a file that can be parsed into\n an etree ElementTree with etree.parse\n Returns:\n An ivy Storage object containing all the information from the\n nexml file: Characters, metadata, OTUs, and trees.\n \"\"\"\n if not isinstance(doc, (etree._ElementTree, etree._Element)):\n doc = etree.parse(doc)\n meta = {}\n for child in doc.findall(NEXML+\"meta\"):\n if \"content\" in child.attrib:\n d = child.attrib\n key = d[\"property\"]\n val = META_DATATYPE[d[\"datatype\"]](d[\"content\"])\n if (key in meta) and val:\n if isinstance(meta[key], list):\n meta[key].append(val)\n else:\n meta[key] = [meta[key], val]\n else:\n meta[key] = val\n\n otus = parse_otus(doc)\n\n return Storage(meta = meta,\n otus = otus,\n chars = parse_chars(doc, otus),\n trees = parse_trees(doc, otus))\n\ndef parse_states(e):\n \"\"\"e is a characters element\"\"\"\n f = e.findall(NEXML+\"format\")[0]\n sts = f.findall(NEXML+\"states\")[0]\n states2symb = {}\n symb2states = {}\n id2symb = {}\n for child in sts.iterchildren():\n t = child.tag\n if t == NEXML+\"state\":\n k = child.attrib[\"id\"]\n v = child.attrib[\"symbol\"]\n id2symb[k] = v\n states2symb[v] = v\n symb2states[v] = v\n elif t == NEXML+\"uncertain_state_set\":\n k = child.attrib[\"id\"]\n v = child.attrib[\"symbol\"]\n id2symb[k] = v\n memberstates = []\n for memb in child.findall(NEXML+\"member\"):\n sid = memb.attrib[\"state\"]\n symb = id2symb[sid]\n for x in symb2states[symb]: memberstates.append(x)\n memberstates = frozenset(memberstates)\n symb2states[v] = memberstates\n states2symb[memberstates] = v\n return Storage(states2symb=states2symb,\n symb2states=symb2states,\n id2symb=id2symb)\n\ndef parse_charsets(study_id):\n from cStringIO import StringIO\n nx = 
StringIO(fetch_study(study_id, 'nexus'))\n d = {}\n for line in nx.readlines():\n if line.strip().startswith(\"CHARSET \"):\n v = line.strip().split()\n label = v[1]\n first, last = map(int, line.split()[-1][:-1].split(\"-\"))\n d[label] = (first-1, last-1)\n return d\n\nif __name__ == \"__main__\":\n import sys\n from pprint import pprint\n e = fetch_study('S11152', 'nexus')\n #print e\n #e.write(sys.stdout, pretty_print=True)\n\n ## e = etree.parse('/tmp/tmp.xml')\n ## x = parse_nexml(e)\n ## pprint(x)\n", "ivy/vis/alignment.py": "\"\"\"\ninteractive viewers for trees, etc. using matplotlib\n\"\"\"\nfrom collections import defaultdict\nfrom .. import align, sequtil\nimport matplotlib, numpy\nimport matplotlib.pyplot as pyplot\nfrom matplotlib.figure import SubplotParams\nfrom matplotlib.axes import Axes, subplot_class_factory\nfrom matplotlib.patches import Rectangle\nfrom matplotlib.collections import PatchCollection\nfrom matplotlib.widgets import RectangleSelector\nfrom matplotlib import colors as mpl_colors\nfrom matplotlib.ticker import MaxNLocator, FuncFormatter, NullLocator\nfrom mpl_toolkits.axes_grid1.inset_locator import inset_axes\nfrom Bio.Align import MultipleSeqAlignment\n\nmatplotlib.rcParams['path.simplify'] = False\n\nclass UpdatingRect(Rectangle):\n def __call__(self, p):\n self.set_bounds(*p.viewLim.bounds)\n p.figure.canvas.draw_idle()\n\nclass AlignmentFigure:\n def __init__(self, aln, name=None, div=0.25, overview=True):\n if isinstance(aln, MultipleSeqAlignment):\n self.aln = aln\n else:\n self.aln = align.read(aln)\n self.name = name\n self.div_value = div\n pars = SubplotParams(\n left=0.2, right=1, bottom=0.05, top=1, wspace=0.01\n )\n fig = pyplot.figure(subplotpars=pars, facecolor=\"white\")\n self.figure = fig\n self.initialize_subplots(overview)\n self.show()\n self.connect_events()\n \n def initialize_subplots(self, overview=False):\n ## p = AlignmentPlot(self.figure, 212, aln=self.aln)\n p = AlignmentPlot(self.figure, 111, 
aln=self.aln, app=self)\n self.detail = self.figure.add_subplot(p)\n self.detail.plot_aln()\n if overview:\n self.overview = inset_axes(\n self.detail, width=\"30%\", height=\"20%\", loc=1\n )\n self.overview.xaxis.set_major_locator(NullLocator())\n self.overview.yaxis.set_major_locator(NullLocator())\n self.overview.imshow(\n self.detail.array, interpolation='nearest', aspect='auto',\n origin='lower'\n )\n rect = UpdatingRect(\n [0,0], 0, 0, facecolor='black', edgecolor='cyan', alpha=0.5\n )\n self.overview.zoomrect = rect\n rect.target = self.detail\n self.detail.callbacks.connect('xlim_changed', rect)\n self.detail.callbacks.connect('ylim_changed', rect)\n self.overview.add_patch(rect)\n rect(self.overview)\n\n else:\n self.overview = None\n \n def show(self):\n self.figure.show()\n\n def connect_events(self):\n mpl_connect = self.figure.canvas.mpl_connect\n mpl_connect(\"button_press_event\", self.onclick)\n mpl_connect(\"button_release_event\", self.onbuttonrelease)\n mpl_connect(\"scroll_event\", self.onscroll)\n mpl_connect(\"pick_event\", self.onpick)\n mpl_connect(\"motion_notify_event\", self.ondrag)\n mpl_connect(\"key_press_event\", self.onkeypress)\n mpl_connect(\"axes_enter_event\", self.axes_enter)\n mpl_connect(\"axes_leave_event\", self.axes_leave)\n\n @staticmethod\n def axes_enter(e):\n ax = e.inaxes\n ax._active = True\n\n @staticmethod\n def axes_leave(e):\n ax = e.inaxes\n ax._active = False\n\n @staticmethod\n def onselect(estart, estop):\n b = estart.button\n ## print b, estart.key\n\n @staticmethod\n def onkeypress(e):\n ax = e.inaxes\n k = e.key\n if ax and k:\n if k == 't':\n ax.home()\n elif k == \"down\":\n ax.scroll(0, -0.1)\n elif k == \"up\":\n ax.scroll(0, 0.1)\n elif k == \"left\":\n ax.scroll(-0.1, 0)\n elif k == \"right\":\n ax.scroll(0.1, 0)\n elif k in '=+':\n ax.zoom(0.1,0.1)\n elif k == '-':\n ax.zoom(-0.1,-0.1)\n ax.figure.canvas.draw_idle()\n\n @staticmethod\n def ondrag(e):\n ax = e.inaxes\n button = e.button\n if ax and 
button == 2:\n if not ax.pan_start:\n ax.pan_start = (e.xdata, e.ydata)\n return\n x, y = ax.pan_start\n xdelta = x - e.xdata\n ydelta = y - e.ydata\n x0, x1 = ax.get_xlim()\n xspan = x1-x0\n y0, y1 = ax.get_ylim()\n yspan = y1 - y0\n midx = (x1+x0)*0.5\n midy = (y1+y0)*0.5\n ax.set_xlim(midx+xdelta-xspan*0.5, midx+xdelta+xspan*0.5)\n ax.set_ylim(midy+ydelta-yspan*0.5, midy+ydelta+yspan*0.5)\n ax.figure.canvas.draw_idle()\n\n @staticmethod\n def onbuttonrelease(e):\n ax = e.inaxes\n button = e.button\n if button == 2:\n ## print \"pan end\"\n ax.pan_start = None\n ax.figure.canvas.draw_idle()\n\n @staticmethod\n def onpick(e):\n ax = e.mouseevent.inaxes\n if ax:\n ax.picked(e)\n ax.figure.canvas.draw_idle()\n\n @staticmethod\n def onscroll(e):\n ax = e.inaxes\n if ax:\n b = e.button\n ## print b\n k = e.key\n if k == None and b ==\"up\":\n ax.zoom(0.1,0.1)\n elif k == None and b ==\"down\":\n ax.zoom(-0.1,-0.1)\n elif k == \"shift\" and b == \"up\":\n ax.zoom_cxy(0.1, 0, e.xdata, e.ydata)\n elif k == \"shift\" and b == \"down\":\n ax.zoom_cxy(-0.1, 0, e.xdata, e.ydata)\n elif k == \"control\" and b == \"up\":\n ax.zoom_cxy(0, 0.1, e.xdata, e.ydata)\n elif k == \"control\" and b == \"down\":\n ax.zoom_cxy(0, -0.1, e.xdata, e.ydata)\n elif k == \"d\" and b == \"up\":\n ax.scroll(0, 0.1)\n elif (k == \"d\" and b == \"down\"):\n ax.scroll(0, -0.1)\n elif k == \"c\" and b == \"up\":\n ax.scroll(-0.1, 0)\n elif k == \"c\" and b == \"down\":\n ax.scroll(0.1, 0)\n ax.figure.canvas.draw_idle()\n\n @staticmethod\n def onclick(e):\n ax = e.inaxes\n if (ax and e.button==1 and hasattr(ax, \"zoomrect\") and ax.zoomrect):\n # overview clicked; reposition zoomrect\n r = ax.zoomrect\n x = e.xdata\n y = e.ydata\n arr = ax.transData.inverted().transform(r.get_extents())\n xoff = (arr[1][0]-arr[0][0])*0.5\n yoff = (arr[1][1]-arr[0][1])*0.5\n r.target.set_xlim(x-xoff,x+xoff)\n r.target.set_ylim(y-yoff,y+yoff)\n r(r.target)\n ax.figure.canvas.draw_idle()\n\n elif ax and e.button==2:\n 
## print \"pan start\", (e.xdata, e.ydata)\n ax.pan_start = (e.xdata, e.ydata)\n ax.figure.canvas.draw_idle()\n\n elif ax and hasattr(ax, \"aln\") and ax.aln:\n x = int(e.xdata+0.5); y = int(e.ydata+0.5)\n aln = ax.aln\n if (0 <= x <= ax.nchar) and (0 <= y <= ax.ntax):\n seq = aln[y]; char = seq[x]\n if char not in '-?':\n v = sequtil.gapidx(seq)\n i = (v[1]==x).nonzero()[0][0]\n print(\"%s: row %s, site %s: '%s', seqpos %s\"\n % (seq.id, y, x, char, i))\n else:\n print(\"%s: row %s, site %s: '%s'\" % (seq.id, y, x, char))\n\n def zoom(self, factor=0.1):\n \"Zoom both axes by *factor* (relative display size).\"\n self.detail.zoom(factor, factor)\n self.figure.canvas.draw_idle()\n\n def __get_selection(self):\n return self.detail.extract_selected()\n selected = property(__get_selection)\n \nclass Alignment(Axes):\n \"\"\"\n matplotlib.axes.Axes subclass for rendering sequence alignments.\n \"\"\"\n def __init__(self, fig, rect, *args, **kwargs):\n self.aln = kwargs.pop(\"aln\")\n nrows = len(self.aln)\n ncols = self.aln.get_alignment_length()\n self.alnidx = numpy.arange(ncols)\n self.app = kwargs.pop(\"app\", None)\n self.showy = kwargs.pop('showy', True)\n Axes.__init__(self, fig, rect, *args, **kwargs)\n rgb = mpl_colors.colorConverter.to_rgb\n gray = rgb('gray')\n d = defaultdict(lambda:gray)\n d[\"A\"] = rgb(\"red\")\n d[\"a\"] = rgb(\"red\")\n d[\"C\"] = rgb(\"blue\")\n d[\"c\"] = rgb(\"blue\")\n d[\"G\"] = rgb(\"green\")\n d[\"g\"] = rgb(\"green\")\n d[\"T\"] = rgb(\"yellow\")\n d[\"t\"] = rgb(\"yellow\")\n self.cmap = d\n self.selector = RectangleSelector(\n self, self.select_rectangle, useblit=True\n )\n def f(e):\n if e.button != 1: return True\n else: return RectangleSelector.ignore(self.selector, e)\n self.selector.ignore = f\n self.selected_rectangle = Rectangle(\n [0,0],0,0, facecolor='white', edgecolor='cyan', alpha=0.3\n )\n self.add_patch(self.selected_rectangle)\n self.highlight_find_collection = None\n\n def plot_aln(self):\n cmap = self.cmap\n 
self.ntax = len(self.aln); self.nchar = self.aln.get_alignment_length()\n a = numpy.array([ [ cmap[base] for base in x.seq ]\n for x in self.aln ])\n self.array = a\n self.imshow(a, interpolation='nearest', aspect='auto', origin='lower')\n y = [ i+0.5 for i in xrange(self.ntax) ]\n labels = [ x.id for x in self.aln ]\n ## locator.bin_boundaries(1,ntax)\n ## locator.view_limits(1,ntax)\n if self.showy:\n locator = MaxNLocator(nbins=50, integer=True)\n self.yaxis.set_major_locator(locator)\n def fmt(x, pos=None):\n if x<0: return \"\"\n try: return labels[int(round(x))]\n except: pass\n return \"\"\n self.yaxis.set_major_formatter(FuncFormatter(fmt))\n else:\n self.yaxis.set_major_locator(NullLocator())\n \n return self\n\n def select_rectangle(self, e0, e1):\n x0, x1 = map(int, sorted((e0.xdata+0.5, e1.xdata+0.5)))\n y0, y1 = map(int, sorted((e0.ydata+0.5, e1.ydata+0.5)))\n self.selected_chars = (x0, x1)\n self.selected_taxa = (y0, y1)\n self.selected_rectangle.set_bounds(x0-0.5,y0-0.5,x1-x0+1,y1-y0+1)\n self.app.figure.canvas.draw_idle()\n\n def highlight_find(self, substr):\n if not substr:\n if self.highlight_find_collection:\n self.highlight_find_collection.remove()\n self.highlight_find_collection = None\n return\n \n N = len(substr)\n v = []\n for y, x in align.find(self.aln, substr):\n r = Rectangle(\n [x-0.5,y-0.5], N, 1,\n facecolor='cyan', edgecolor='cyan', alpha=0.7\n )\n v.append(r)\n if self.highlight_find_collection:\n self.highlight_find_collection.remove()\n c = PatchCollection(v, True)\n self.highlight_find_collection = self.add_collection(c)\n self.app.figure.canvas.draw_idle()\n\n def extract_selected(self):\n r0, r1 = self.selected_taxa\n c0, c1 = self.selected_chars\n return self.aln[r0:r1+1,c0:c1+1]\n\n def zoom_cxy(self, x=0.1, y=0.1, cx=None, cy=None):\n \"\"\"\n Zoom the x and y axes in by the specified proportion of the\n current view, with a fixed data point (cx, cy)\n \"\"\"\n transform = self.transData.inverted().transform\n xlim = 
self.get_xlim(); xmid = sum(xlim)*0.5\n ylim = self.get_ylim(); ymid = sum(ylim)*0.5\n bb = self.get_window_extent()\n bbx = bb.expanded(1.0-x,1.0-y)\n points = transform(bbx.get_points())\n x0, x1 = points[:,0]; y0, y1 = points[:,1]\n deltax = xmid-x0; deltay = ymid-y0\n cx = cx or xmid; cy = cy or ymid\n xoff = (cx-xmid)*x\n self.set_xlim(xmid-deltax+xoff, xmid+deltax+xoff)\n yoff = (cy-ymid)*y\n self.set_ylim(ymid-deltay+yoff, ymid+deltay+yoff)\n\n def zoom(self, x=0.1, y=0.1, cx=None, cy=None):\n \"\"\"\n Zoom the x and y axes in by the specified proportion of the\n current view.\n \"\"\"\n # get the function to convert display coordinates to data\n # coordinates\n transform = self.transData.inverted().transform\n xlim = self.get_xlim()\n ylim = self.get_ylim()\n bb = self.get_window_extent()\n bbx = bb.expanded(1.0-x,1.0-y)\n points = transform(bbx.get_points())\n x0, x1 = points[:,0]; y0, y1 = points[:,1]\n deltax = x0 - xlim[0]; deltay = y0 - ylim[0]\n self.set_xlim(xlim[0]+deltax, xlim[1]-deltax)\n self.set_ylim(ylim[0]+deltay, ylim[1]-deltay)\n\n def center_y(self, y):\n ymin, ymax = self.get_ylim()\n yoff = (ymax - ymin) * 0.5\n self.set_ylim(y-yoff, y+yoff)\n\n def center_x(self, x, offset=0.3):\n xmin, xmax = self.get_xlim()\n xspan = xmax - xmin\n xoff = xspan*0.5 + xspan*offset\n self.set_xlim(x-xoff, x+xoff)\n\n def scroll(self, x, y):\n x0, x1 = self.get_xlim()\n y0, y1 = self.get_ylim()\n xd = (x1-x0)*x\n yd = (y1-y0)*y\n self.set_xlim(x0+xd, x1+xd)\n self.set_ylim(y0+yd, y1+yd)\n\n def home(self):\n self.set_xlim(0, self.nchar)\n self.set_ylim(self.ntax, 0)\n\nAlignmentPlot = subplot_class_factory(Alignment)\n\n", "ivy/vis/hardcopy.py": "import os, matplotlib\nfrom matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas\nfrom matplotlib.figure import Figure\nfrom . 
import tree\nimport tempfile\n\n## class TreeFigure:\n## def __init__(self):\n## pass\n\nmatplotlib.rcParams[\"xtick.direction\"] = \"out\"\n\nclass TreeFigure:\n def __init__(self, root, relwidth=0.5, leafpad=1.5, name=None,\n support=70.0, scaled=True, mark_named=True,\n leaf_fontsize=10, branch_fontsize=10,\n branch_width=1, branch_color=\"black\",\n highlight_support=True,\n branchlabels=True, leaflabels=True, decorators=[],\n xoff=0, yoff=0,\n xlim=None, ylim=None,\n height=None, width=None):\n self.root = root\n self.relwidth = relwidth\n self.leafpad = leafpad\n self.name = name\n self.support = support\n self.scaled = scaled\n self.mark_named = mark_named\n self.leaf_fontsize = leaf_fontsize\n self.branch_fontsize = branch_fontsize\n self.branch_width = branch_width\n self.branch_color = branch_color\n self.highlight_support = highlight_support\n self.branchlabels = branchlabels\n self.leaflabels = leaflabels\n self.decorators = decorators\n self.xoff = xoff\n self.yoff = yoff\n\n nleaves = len(root.leaves())\n self.dpi = 72.0\n h = height or (nleaves*self.leaf_fontsize*self.leafpad)/self.dpi\n self.height = h\n self.width = width or self.height*self.relwidth\n ## p = min(self.width, self.height)*0.1\n ## self.height += p\n ## self.width += p\n self.figure = Figure(figsize=(self.width, self.height), dpi=self.dpi)\n self.canvas = FigureCanvas(self.figure)\n self.axes = self.figure.add_axes(\n tree.TreePlot(self.figure, 1,1,1,\n support=self.support,\n scaled=self.scaled,\n mark_named=self.mark_named,\n leaf_fontsize=self.leaf_fontsize,\n branch_fontsize=self.branch_fontsize,\n branch_width=self.branch_width,\n branch_color=self.branch_color,\n highlight_support=self.highlight_support,\n branchlabels=self.branchlabels,\n leaflabels=self.leaflabels,\n interactive=False,\n decorators=self.decorators,\n xoff=self.xoff, yoff=self.yoff,\n name=self.name).plot_tree(self.root)\n )\n self.axes.spines[\"top\"].set_visible(False)\n 
self.axes.spines[\"left\"].set_visible(False)\n self.axes.spines[\"right\"].set_visible(False)\n self.axes.spines[\"bottom\"].set_smart_bounds(True)\n self.axes.xaxis.set_ticks_position(\"bottom\")\n\n for v in self.axes.node2label.values():\n v.set_visible(True)\n\n ## for k, v in self.decorators:\n ## func, args, kwargs = v\n ## func(self.axes, *args, **kwargs)\n\n self.canvas.draw()\n ## self.axes.home()\n ## adjust_limits(self.axes)\n self.axes.set_position([0.05,0.05,0.95,0.95])\n\n @property\n def detail(self):\n return self.axes\n \n def savefig(self, fname):\n root, ext = os.path.splitext(fname)\n buf = tempfile.TemporaryFile()\n for i in range(3):\n self.figure.savefig(buf, format=ext[1:].lower())\n self.home()\n buf.seek(0)\n buf.close()\n self.figure.savefig(fname)\n\n def set_relative_width(self, relwidth):\n w, h = self.figure.get_size_inches()\n self.figure.set_figwidth(h*relwidth)\n\n def autoheight(self):\n \"adjust figure height to show all leaf labels\"\n nleaves = len(self.root.leaves())\n h = (nleaves*self.leaf_fontsize*self.leafpad)/self.dpi\n self.height = h\n self.figure.set_size_inches(self.width, self.height)\n self.axes.set_ylim(-2, nleaves+2)\n\n def home(self):\n self.axes.home()\n", "ivy/vis/symbols.py": "\"\"\"\nConvenience functions for drawing shapes on TreePlots.\n\"\"\"\ntry:\n import Image\nexcept ImportError:\n from PIL import Image\nfrom numpy import pi\nfrom matplotlib.collections import RegularPolyCollection, CircleCollection\nfrom matplotlib.transforms import offset_copy\nfrom matplotlib.patches import Rectangle, Wedge, Circle, PathPatch\nfrom matplotlib.offsetbox import DrawingArea\nfrom matplotlib.axes import Axes\nfrom matplotlib.path import Path\n\ntry:\n from matplotlib.offsetbox import OffsetImage, AnnotationBbox\nexcept ImportError:\n pass\nfrom ..tree import Node\nfrom . 
import colors as _colors\n\ndef _xy(plot, p):\n if isinstance(p, Node):\n c = plot.n2c[p]\n p = (c.x, c.y)\n elif isinstance(p, (list, tuple)):\n p = [ _xy(plot, x) for x in p ]\n else:\n pass\n return p\n\n\n\ndef image(plot, p, imgfile,\n maxdim=100, border=0,\n xoff=4, yoff=4,\n halign=0.0, valign=0.5,\n xycoords='data',\n boxcoords=('offset points')):\n \"\"\"\n Add images to plot\n\n Args:\n plot (Tree): A Tree plot instance\n p (Node): A node object\n imgfile (str): A path to an image\n maxdim (float): Maximum dimension of image. Optional,\n defaults to 100.\n border: RR: What does border do? -CZ\n xoff, yoff (float): X and Y offset. Optional, defaults to 4\n halign, valign (float): Horizontal and vertical alignment within\n box. Optional, defaults to 0.0 and 0.5, respectively.\n\n \"\"\"\n if xycoords == \"label\":\n xycoords = plot.node2label[p]\n x, y = (1, 0.5)\n else:\n x, y = _xy(plot, p)\n img = Image.open(imgfile)\n if max(img.size) > maxdim:\n img.thumbnail((maxdim, maxdim))\n imgbox = OffsetImage(img)\n abox = AnnotationBbox(imgbox, (x, y),\n xybox=(xoff, yoff),\n xycoords=xycoords,\n box_alignment=(halign,valign),\n pad=0.0,\n boxcoords=boxcoords)\n plot.add_artist(abox)\n plot.figure.canvas.draw_idle()\n\ndef images(plot, p, imgfiles,\n maxdim=100, border=0,\n xoff=4, yoff=4,\n halign=0.0, valign=0.5,\n xycoords='data', boxcoords=('offset points')):\n \"\"\"\n Add many images to plot at once\n\n Args:\n Plot (Tree): A Tree plot instance\n p (list): A list of node objects\n imgfile (list): A list of strs containing paths to image files.\n Must be the same length as p.\n maxdim (float): Maximum dimension of image. Optional,\n defaults to 100.\n border: RR: What does border do? -CZ\n xoff, yoff (float): X and Y offset. Optional, defaults to 4\n halign, valign (float): Horizontal and vertical alignment within\n box. 
Optional, defaults to 0.0 and 0.5, respectively.\n\n \"\"\"\n for x, f in zip(p, imgfiles):\n image(plot, x, f, maxdim, border, xoff, yoff, halign, valign,\n xycoords, boxcoords)\n\ndef pie(plot, p, values, colors=None, size=16, norm=True,\n xoff=0, yoff=0,\n halign=0.5, valign=0.5,\n xycoords='data', boxcoords=('offset points')):\n \"\"\"\n Draw a pie chart\n\n Args:\n plot (Tree): A Tree plot instance\n p (Node): A Node object\n values (list): A list of floats.\n colors (list): A list of strings to pull colors from. Optional.\n size (float): Diameter of the pie chart\n norm (bool): Whether or not to normalize the values so they\n add up to 360\n xoff, yoff (float): X and Y offset. Optional, defaults to 0\n halign, valign (float): Horizontal and vertical alignment within\n box. Optional, defaults to 0.5\n\n \"\"\"\n x, y = _xy(plot, p)\n da = DrawingArea(size, size); r = size*0.5; center = (r,r)\n x0 = 0\n S = 360.0\n if norm: S = 360.0/sum(values)\n if not colors:\n c = _colors.tango()\n colors = [ c.next() for v in values ]\n for i, v in enumerate(values):\n theta = v*S\n if v: da.add_artist(Wedge(center, r, x0, x0+theta,\n fc=colors[i], ec='none'))\n x0 += theta\n box = AnnotationBbox(da, (x,y), pad=0, frameon=False,\n xybox=(xoff, yoff),\n xycoords=xycoords,\n box_alignment=(halign,valign),\n boxcoords=boxcoords)\n plot.add_artist(box)\n plot.figure.canvas.draw_idle()\n return box\n\ndef hbar(plot, p, values, colors=None, height=16,\n xoff=0, yoff=0,\n halign=1, valign=0.5,\n xycoords='data', boxcoords=('offset points')):\n x, y = _xy(plot, p)\n h = height; w = sum(values) * height#; yoff=h*0.5\n da = DrawingArea(w, h)\n x0 = -sum(values)\n if not colors:\n c = _colors.tango()\n colors = [ c.next() for v in values ]\n for i, v in enumerate(values):\n if v: da.add_artist(Rectangle((x0,0), v*h, h, fc=colors[i], ec='none'))\n x0 += v*h\n box = AnnotationBbox(da, (x,y), pad=0, frameon=False,\n xybox=(xoff, yoff),\n xycoords=xycoords,\n 
box_alignment=(halign,valign),\n boxcoords=boxcoords)\n plot.add_artist(box)\n plot.figure.canvas.draw_idle()\n\ndef hbars(plot, p, values, colors=None, height=16,\n xoff=0, yoff=0,\n halign=1, valign=0.5,\n xycoords='data', boxcoords=('offset points')):\n for x, v in zip(p, values):\n hbar(plot, x, v, colors, height, xoff, yoff, halign, valign,\n xycoords, boxcoords)\n\ndef squares(plot, p, colors='r', size=15, xoff=0, yoff=0, alpha=1.0,\n zorder=1000):\n \"\"\"\n Draw a square at given node\n\n Args:\n plot (Tree): A Tree plot instance\n p: A node or list of nodes\n colors: Str or list of strs. Colors of squares to be drawn.\n Optional, defaults to 'r' (red)\n size (float): Size of the squares. Optional, defaults to 15\n xoff, yoff (float): Offset for x and y dimensions. Optional,\n defaults to 0.\n alpha (float): between 0 and 1. Alpha transparency of squares.\n Optional, defaults to 1 (fully opaque)\n zorder (int): The drawing order. Higher numbers appear on top\n of lower numbers. Optional, defaults to 1000.\n\n \"\"\"\n points = _xy(plot, p)\n trans = offset_copy(\n plot.transData, fig=plot.figure, x=xoff, y=yoff, units='points')\n\n col = RegularPolyCollection(\n numsides=4, rotation=pi*0.25, sizes=(size*size,),\n offsets=points, facecolors=colors, transOffset=trans,\n edgecolors='none', alpha=alpha, zorder=zorder\n )\n\n plot.add_collection(col)\n plot.figure.canvas.draw_idle()\n\ndef tipsquares(plot, p, colors='r', size=15, pad=2, edgepad=10):\n \"\"\"\n RR: Bug with this function. If you attempt to call it with a list as an\n argument for p, it will not only not work (expected) but it will also\n make it so that you can't interact with the tree figure (gives errors when\n you try to add symbols, select nodes, etc.) -CZ\n\n Add square after tip label, anchored to the side of the plot\n\n Args:\n plot (Tree): A Tree plot instance.\n p (Node): A Node object (Should be a leaf node).\n colors (str): olor of drawn square. 
Optional, defaults to 'r' (red)\n size (float): Size of square. Optional, defaults to 15\n pad: RR: I am unsure what this does. Does not seem to have visible\n effect when I change it. -CZ\n edgepad (float): Padding from square to edge of plot. Optional,\n defaults to 10.\n\n \"\"\"\n x, y = _xy(plot, p) # p is a single node or point in data coordinates\n n = len(colors)\n da = DrawingArea(size*n+pad*(n-1), size, 0, 0)\n sx = 0\n for c in colors:\n sq = Rectangle((sx,0), size, size, color=c)\n da.add_artist(sq)\n sx += size+pad\n box = AnnotationBbox(da, (x, y), xybox=(-edgepad,y),\n frameon=False,\n pad=0.0,\n xycoords='data',\n box_alignment=(1, 0.5),\n boxcoords=('axes points','data'))\n plot.add_artist(box)\n plot.figure.canvas.draw_idle()\n\n\ndef circles(plot, p, colors='g', size=15, xoff=0, yoff=0):\n \"\"\"\n Draw circles on plot\n\n Args:\n plot (Tree): A Tree plot instance\n p: A node object or list of Node objects\n colors: Str or list of strs. Colors of the circles. Optional,\n defaults to 'g' (green)\n size (float): Size of the circles. Optional, defaults to 15\n xoff, yoff (float): X and Y offset. Optional, defaults to 0.\n\n \"\"\"\n points = _xy(plot, p)\n trans = offset_copy(\n plot.transData, fig=plot.figure, x=xoff, y=yoff, units='points'\n )\n\n col = CircleCollection(\n sizes=(pi*size*size*0.25,),\n offsets=points, facecolors=colors, transOffset=trans,\n edgecolors='none'\n )\n\n plot.add_collection(col)\n plot.figure.canvas.draw_idle()\n return col\n\ndef legend(plot, colors, labels, shape='rectangle', loc='upper left', **kwargs):\n \"\"\"\n RR: the MPL legend function has changed since this function has been\n written. This function currently does not work. 
-CZ\n \"\"\"\n if shape == 'circle':\n shapes = [ Circle((0.5,0.5), radius=1, fc=c) for c in colors ]\n #shapes = [ CircleCollection([10],facecolors=[c]) for c in colors ]\n else:\n shapes = [ Rectangle((0,0),1,1,fc=c,ec='none') for c in colors ]\n\n return Axes.legend(plot, shapes, labels, loc=loc, **kwargs)\n\ndef leafspace_triangles(plot, color='black', rca=0.5):\n \"\"\"\n RR: Using this function on the primates tree (straight from the newick file)\n gives error: 'Node' object has no attribute 'leafspace'. How do you give\n nodes the leafspace attribute? -CZ\n rca = relative crown age\n \"\"\"\n leaves = plot.root.leaves()\n leafspace = [ float(x.leafspace) for x in leaves ]\n #leafspace = array(raw_leafspace)/(sum(raw_leafspace)/float(len(leaves)))\n pv = []\n for i, n in enumerate(leaves):\n if leafspace[i] > 0:\n p = plot.n2c[n]\n pp = plot.n2c[n.parent]\n spc = leafspace[i]\n yoff = spc/2.0\n x0 = pp.x + (p.x - pp.x)*rca\n verts = [(x0, p.y),\n (p.x, p.y-yoff),\n (p.x, p.y+yoff),\n (x0, p.y)]\n codes = [Path.MOVETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY]\n path = Path(verts, codes)\n patch = PathPatch(path, fc=color, lw=0)\n pv.append(plot.add_patch(patch))\n return pv\n\ndef text(plot, x, y, s, color='black', xoff=0, yoff=0, valign='center',\n halign='left', fontsize=10):\n \"\"\"\n Add text to the plot.\n\n Args:\n plot (Tree): A Tree plot instance\n x, y (float): x and y coordinates to place the text\n s (str): The text to write\n color (str): The color of the text. Optional, defaults to \"black\"\n xoff, yoff (float): x and y offset\n valign (str): Vertical alignment. Can be: 'center', 'top',\n 'bottom', or 'baseline'. Defaults to 'center'.\n halign (str): Horizontal alignment. Can be: 'center', 'right',\n or 'left'. Defaults to 'left'\n fontsize (float): Font size. 
Optional, defaults to 10\n\n \"\"\"\n txt = plot.annotate(\n s, xy=(x, y),\n xytext=(xoff, yoff),\n textcoords=\"offset points\",\n verticalalignment=valign,\n horizontalalignment=halign,\n fontsize=fontsize,\n clip_on=True,\n picker=True\n )\n txt.set_visible(True)\n return txt\n", "ivy/vis/tree.py": "\"\"\"\ninteractive viewers for trees, etc. using matplotlib\n\"\"\"\nimport sys, math, types, os, operator\nfrom itertools import chain\nfrom .. import tree\nfrom ..layout import cartesian\nfrom ..storage import Storage\nfrom .. import pyperclip as clipboard\n# from ..nodecache import NodeCache\nimport matplotlib.pyplot as pyplot\nfrom matplotlib.axes import Axes, subplot_class_factory\nfrom matplotlib.figure import SubplotParams\nfrom matplotlib.patches import PathPatch, Rectangle, Arc\nfrom matplotlib.path import Path\nfrom matplotlib.widgets import RectangleSelector\nfrom matplotlib import cm as mpl_colormap\nfrom matplotlib import colors as mpl_colors\nfrom matplotlib.collections import LineCollection\ntry:\n from matplotlib.offsetbox import OffsetImage, AnnotationBbox\nexcept ImportError:\n pass\nfrom matplotlib.ticker import NullLocator\nfrom mpl_toolkits.axes_grid.anchored_artists import AnchoredText\nfrom mpl_toolkits.axes_grid1.inset_locator import inset_axes\nfrom . import symbols, colors\nfrom . import hardcopy as HC\ntry:\n import Image\nexcept ImportError:\n from PIL import Image\n\n# matplotlib.rcParams['path.simplify'] = False\n\n_tango = colors.tango()\nclass TreeFigure(object):\n \"\"\"\n Window for showing a single tree, optionally with split overview\n and detail panes.\n\n The navigation toolbar at the bottom is provided by matplotlib\n (http://matplotlib.sf.net/users/navigation_toolbar.html). Its\n pan/zoom button and zoom-rectangle button provide different modes\n of mouse interaction with the figure. 
When neither of these\n buttons are checked, the default mouse bindings are as follows:\n\n * button 1 drag: select nodes - retrieve by calling fig.selected_nodes\n * button 3 drag: pan view\n * scroll up/down: zoom in/out\n * scroll up/down with Control key: zoom y-axis\n * scroll up/down with Shift key: zoom x-axis\n * scroll up/down with 'd' key: pan view up/down\n * scroll up/down with 'e' key: pan view left/right\n * click on overview will center the detail pane on that region\n\n Default keybindings:\n\n * t: zoom out to full extent\n * +/-: zoom in/out\n\n Useful attributes and methods (assume an instance named *fig*):\n\n * fig.root - the root node (see [Node methods])\n * fig.highlight(s) - highlight and trace nodes with substring *s*\n * fig.zoom_clade(anc) - zoom to view node *anc* and all its descendants\n * fig.toggle_overview() - toggle visibility of the overview pane\n * fig.toggle_branchlabels() - ditto for branch labels\n * fig.toggle_leaflabels() - ditto for leaf labels\n * fig.decorate(func) - decorate the tree with a function (see\n :ref:`decorating TreeFigures `)\n \"\"\"\n def __init__(self, data, name=None, scaled=True, div=0.25,\n branchlabels=True, leaflabels=True, mark_named=True,\n highlight_support=True, xoff=0, yoff=0,\n overview=True, radial=False):\n self.overview = None\n self.overview_width = div\n self.dataplot = None\n self.dataplot_width = 0.25\n self.name = name\n self.scaled = scaled\n self.branchlabels = branchlabels\n self.leaflabels = leaflabels\n self.mark_named = mark_named\n self.xoff = xoff\n self.yoff = yoff\n self.radial = radial\n if radial:\n self.leaflabels = False\n self.highlighted = set()\n self.highlight_support = highlight_support\n if isinstance(data, tree.Node):\n root = data\n else:\n root = tree.read(data)\n self.root = root\n if not self.root:\n raise IOError(\"cannot coerce data into tree.Node\")\n self.name = self.name or root.treename\n pars = SubplotParams(\n left=0, right=1, bottom=0.05, top=1, 
wspace=0.01\n )\n fig = pyplot.figure(subplotpars=pars, facecolor=\"white\")\n connect_events(fig.canvas)\n self.figure = fig\n self.initialize_subplots(overview)\n self.home()\n\n def initialize_subplots(self, overview=True):\n if not self.radial:\n tp = TreePlot(self.figure, 1, 2, 2, app=self, name=self.name,\n scaled=self.scaled, branchlabels=self.branchlabels,\n highlight_support=self.highlight_support,\n leaflabels=self.leaflabels,\n mark_named=self.mark_named)\n detail = self.figure.add_subplot(tp)\n detail.set_root(self.root)\n detail.plot_tree()\n self.detail = detail\n tp = OverviewTreePlot(\n self.figure, 121, app=self, scaled=self.scaled,\n branchlabels=False, leaflabels=False,\n mark_named=self.mark_named,\n highlight_support=self.highlight_support,\n target=self.detail\n )\n ov = self.figure.add_subplot(tp)\n ov.set_root(self.root)\n ov.plot_tree()\n self.overview = ov\n if not overview:\n self.toggle_overview(False)\n self.set_positions()\n\n if self.detail.nleaves < 50:\n self.toggle_overview(False)\n else:\n tp = RadialTreePlot(\n self.figure, 111, app=self, name=self.name,\n scaled=self.scaled, branchlabels=self.branchlabels,\n highlight_support=self.highlight_support,\n leaflabels=self.leaflabels, mark_named=self.mark_named\n )\n ax2 = self.figure.add_subplot(tp)\n ax2.set_root(self.root)\n ax2.plot_tree()\n self.detail = ax2\n\n def __get_selected_nodes(self):\n return list(self.detail.selected_nodes)\n\n def __set_selected_nodes(self, nodes):\n self.detail.select_nodes(nodes)\n\n def __del_selected_nodes(self):\n self.detail.select_nodes(None)\n\n selected = property(__get_selected_nodes,\n __set_selected_nodes,\n __del_selected_nodes)\n\n ## def selected_nodes(self):\n ## return self.detail.selected_nodes\n\n @property\n def axes(self):\n return self.detail\n\n def add(self, data, name=None, support=70,\n branchlabels=False, leaflabels=True, mark_named=True):\n \"\"\"\n Add a new tree in a new window\n\n Args:\n data: A node object or tree 
file.\n name (str): Name of the plot. Defaults to None\n branchlabels (bool): Whether or not to draw branch labels.\n Defaults to False\n leaflabels (bool): Whether or not to draw leaf labels.\n Defaults to True\n \"\"\"\n newfig = MultiTreeFigure()\n ## newfig.add(self.root, name=self.name, support=self.support,\n ## branchlabels=self.branchlabels)\n newfig.add(data, name=name, support=support,\n branchlabels=branchlabels,\n leaflabels=leaflabels,\n mark_named=mark_named)\n return newfig\n\n def toggle_leaflabels(self):\n \"\"\"\n Toggle leaf labels and redraw tree\n \"\"\"\n self.leaflabels = not self.leaflabels\n self.detail.leaflabels = self.leaflabels\n self.redraw()\n\n def toggle_branchlabels(self):\n \"\"\"\n Toggle branch labels and redraw tree\n \"\"\"\n self.branchlabels = not self.branchlabels\n self.detail.branchlabels = self.branchlabels\n self.redraw()\n\n def toggle_overview(self, val=None):\n \"\"\"\n Toggle overview\n \"\"\"\n if val is None:\n if self.overview.get_visible():\n self.overview.set_visible(False)\n self.overview_width = 0.001\n else:\n self.overview.set_visible(True)\n self.overview_width = 0.25\n elif val:\n self.overview.set_visible(True)\n self.overview_width = val\n else:\n self.overview.set_visible(False)\n self.overview_width = 0.001\n self.set_positions()\n\n def set_scaled(self, scaled):\n \"\"\"\n RR: Using this method gives the error:\n redraw takes exactly 1 argument(2 given)-CZ\n Define whether or not the tree is scaled and redraw tree\n\n Args:\n scaled (bool): Whether or not the tree is scaled.\n \"\"\"\n for p in self.overview, self.detail:\n p.redraw(p.set_scaled(scaled))\n self.set_positions()\n\n def on_nodes_selected(self, treeplot):\n pass\n\n def picked(self, e):\n try:\n if e.mouseevent.button==1:\n s = e.artist.get_text()\n clipboard.copy(s)\n print(s)\n sys.stdout.flush()\n except:\n pass\n\n def ladderize(self, rev=False):\n \"\"\"\n Ladderize and redraw the tree\n \"\"\"\n self.root.ladderize(rev)\n 
self.redraw()\n\n def show(self):\n \"\"\"\n Plot the figure in a new window\n \"\"\"\n self.figure.show()\n\n def set_positions(self):\n ov = self.overview\n p = self.detail\n dp = self.dataplot\n height = 1.0-p.xoffset()\n if ov:\n box = [0, p.xoffset(), self.overview_width, height]\n ov.set_position(box)\n w = 1.0\n if ov:\n w -= self.overview_width\n if dp:\n w -= self.dataplot_width\n p.set_position([self.overview_width, p.xoffset(), w, height])\n if dp:\n box = [1.0-self.dataplot_width, p.xoffset(),\n self.dataplot_width, height]\n dp.set_position(box)\n self.figure.canvas.draw_idle()\n\n ## def div(self, v=0.3):\n ## assert 0 <= v < 1\n ## self.overview_width = v\n ## self.set_positions()\n ## self.figure.canvas.draw_idle()\n\n def add_dataplot(self):\n \"\"\"\n Add new plot to the side of existing plot\n \"\"\"\n np = 3 if self.overview else 2\n if self.dataplot:\n self.figure.delaxes(self.dataplot)\n self.dataplot = self.figure.add_subplot(1, np, np, sharey=self.detail)\n # left, bottom, width, height (proportions)\n dleft, dbottom, dwidth, dheight = self.detail.get_position().bounds\n # give the dataplot one-quarter the width of the detail axes\n w = dwidth * 0.25\n self.detail.set_position([dleft, dbottom, dwidth-w, dheight])\n self.dataplot.set_position([1-w, dbottom, w, dheight])\n self.dataplot.xaxis.set_visible(False)\n self.dataplot.yaxis.set_visible(False)\n for x in self.dataplot.spines.values():\n x.set_visible(False)\n self.figure.canvas.draw_idle()\n return self.dataplot\n\n def redraw(self):\n \"\"\"\n Replot the figure and overview\n \"\"\"\n self.detail.redraw()\n if self.overview: self.overview.redraw()\n self.highlight()\n self.set_positions()\n self.figure.canvas.draw_idle()\n\n def find(self, x):\n \"\"\"\n Find nodes\n\n Args:\n x (str): String to search\n Returns:\n list: A list of node objects found with the Node findall() method\n \"\"\"\n return self.root.findall(x)\n\n def hlines(self, nodes, width=5, color=\"red\", xoff=0, 
yoff=0):\n \"\"\"\n Highlight nodes\n\n Args:\n nodes (list): A list of node objects\n width (float): Width of highlighted lines. Defaults to 5\n color (str): Color of highlighted lines. Defaults to red\n xoff (float): Number of units to offset lines by. Defaults to 0\n yoff (float): Number of units to offset lines by. Defaults to 0\n \"\"\"\n self.overview.hlines(nodes, width=width, color=color,\n xoff=xoff, yoff=yoff)\n self.detail.hlines(nodes, width=width, color=color,\n xoff=xoff, yoff=yoff)\n\n def highlight(self, x=None, width=5, color=\"red\"):\n \"\"\"\n Highlight nodes\n\n Args:\n x: Str or list of Strs or Node or list of Nodes\n width (float): Width of highlighted lines. Defaults to 5\n color (str): Color of highlighted lines. Defaults to red\n \"\"\"\n if x:\n nodes = set()\n if type(x) in types.StringTypes:\n nodes = self.root.findall(x)\n elif isinstance(x, tree.Node):\n nodes = set(x)\n else:\n for n in x:\n if type(n) in types.StringTypes:\n found = self.root.findall(n)\n if found:\n nodes |= set(found)\n elif isinstance(n, tree.Node):\n nodes.add(n)\n\n self.highlighted = nodes\n else:\n self.highlighted = set()\n if self.overview:\n self.overview.highlight(self.highlighted, width=width, color=color)\n self.detail.highlight(self.highlighted, width=width, color=color)\n self.figure.canvas.draw_idle()\n\n def home(self):\n \"\"\"\n Return plot to initial size and location.\n \"\"\"\n if self.overview: self.overview.home()\n self.detail.home()\n\n def zoom_clade(self, x):\n \"\"\"\n Zoom to fit a node *x* and all its descendants in the view.\n\n Args:\n x: Node or str that matches the label of a node\n \"\"\"\n if not isinstance(x, tree.Node):\n x = self.root[x]\n self.detail.zoom_clade(x)\n\n def zoom(self, factor=0.1):\n \"\"\"Zoom both axes by *factor* (relative display size).\"\"\"\n self.detail.zoom(factor, factor)\n self.figure.canvas.draw_idle()\n\n def zx(self, factor=0.1):\n \"\"\"Zoom x axis by *factor*.\"\"\"\n self.detail.zoom(factor, 0)\n 
self.figure.canvas.draw_idle()\n\n def zy(self, factor=0.1):\n \"\"\"Zoom y axis by *factor*.\"\"\"\n self.detail.zoom(0, factor)\n self.figure.canvas.draw_idle()\n\n def decorate(self, func, *args, **kwargs):\n \"\"\"\n Decorate the tree.\n\n Args:\n func (function): A function that takes a TreePlot instance as the\n first parameter, and *args* and *kwargs* as additional\n parameters. It adds boxes, circles, etc to the TreePlot.\n\n Notes:\n If *kwargs* contains the key-value pair ('store', *name*),\n then the function is stored as *name* and re-called every time\n the TreePlot is redrawn, i.e., the decoration is persistent.\n Use ``rmdec(name)`` to remove the decorator from the treeplot.\n \"\"\"\n self.detail.decorate(func, *args, **kwargs)\n\n def rmdec(self, name):\n \"Remove the decoration 'name'.\"\n self.detail.rmdec(name)\n ## if name in self.detail.decorators:\n ## del self.detail.decorators[name]\n\n def cbar(self, node, width=6, color='blue', mrca = True):\n pass\n # self.axes.cbar(nodes = node, width = width, color = color, mrca = mrca)\n\n def unclutter(self, *args):\n self.detail.unclutter()\n\n def trace_branches(self, nodes, width=4, color=\"blue\"):\n \"\"\"\n RR: What is the difference between this and highlight? 
-CZ\n \"\"\"\n for p in self.overview, self.detail:\n p.trace_branches(nodes, width, color)\n\n def plot_continuous(self, *args, **kwargs):\n self.detail.plot_continuous(*args, **kwargs)\n\n def hardcopy(self, fname=None, relwidth=None, leafpad=1.5):\n if not relwidth:\n bbox = self.detail.get_tightbbox(self.figure.canvas.get_renderer())\n relwidth = bbox.width/bbox.height\n f = self.detail.hardcopy(\n relwidth=relwidth,\n leafpad=leafpad\n )\n f.axes.home()\n #f.axes.set_xlim(*self.detail.get_xlim())\n #f.axes.set_ylim(*self.detail.get_ylim())\n if fname:\n f.savefig(fname)\n return f\n\n def select_nodes(self, nodes=None):\n \"\"\"\n Select nodes on the plot\n\n Args:\n nodes: A node or list of ndoes\n Notes:\n If only one node is given, all of the node's ancestors are\n also selected. If a list of nodes is given (even if it has only\n one node), only the given node(s) are selected.\n \"\"\"\n self.detail.select_nodes(nodes)\n\n def decorate(self, func, *args, **kwargs): # RR: is this repeated from above? -CZ\n self.detail.decorate(func, *args, **kwargs)\n\n ## def dataplot(self):\n ## ax = self.figure.add_subplot(133, sharey=self.detail)\n ## ax.yaxis.set_visible(False)\n ## self.dataplot = ax\n ## return ax\n\n def attach_alignment(self, aln, overview=True):\n \"leaf labels expected to be sequence ids\"\n from Bio.Align import MultipleSeqAlignment\n from Bio.Seq import Seq\n from Bio.SeqRecord import SeqRecord\n from Bio.Alphabet import IUPAC\n from alignment import AlignmentFigure, AlignmentPlot\n if not isinstance(aln, MultipleSeqAlignment):\n from .. 
import align\n aln = align.read(aln)\n d = dict([ (x.id,x) for x in aln ])\n emptyseq = Seq('-'*aln.get_alignment_length(),\n alphabet=IUPAC.ambiguous_dna)\n aln = MultipleSeqAlignment(\n [ d.get(x.label) or SeqRecord(emptyseq, id=x.label)\n for x in self.root.leaves() ]\n )\n self.aln = aln\n p = AlignmentPlot(self.figure, 133, aln=aln, app=self,\n sharey=self.detail, showy=False)\n self.alnplot = Storage()\n self.alnplot.detail = self.figure.add_subplot(p)\n detail = self.alnplot.detail\n detail.plot_aln()\n if overview:\n self.alnplot.overview = inset_axes(\n detail, width=\"30%\", height=\"20%\", loc=1\n )\n overview = self.alnplot.overview\n overview.xaxis.set_major_locator(NullLocator())\n overview.yaxis.set_major_locator(NullLocator())\n overview.imshow(\n detail.array, interpolation='nearest', aspect='auto',\n origin='lower'\n )\n rect = UpdatingRect(\n [0,0], 0, 0, facecolor='black', edgecolor='cyan', alpha=0.5\n )\n overview.zoomrect = rect\n rect.target = detail\n detail.callbacks.connect('xlim_changed', rect)\n detail.callbacks.connect('ylim_changed', rect)\n overview.add_patch(rect)\n rect(overview)\n self.toggle_overview(False)\n xoff = self.detail.xoffset()\n self.detail.set_position([0, xoff, 0.3, 1.0-xoff])\n p.set_position([0.3, xoff, 0.7, 1.0-xoff])\n\n\nclass MultiTreeFigure(object):\n \"\"\"\n Window for showing multiple trees side-by-side.\n\n TODO: document this\n \"\"\"\n def __init__(self, trees=None, name=None, support=70,\n scaled=True, branchlabels=False, radial=False):\n \"\"\"\n *trees* are assumed to be objects suitable for passing to\n ivy.tree.read()\n \"\"\"\n self.root = []\n self.name = name\n self.name2plot = {}\n self.plot = []\n self.scaled = scaled\n self.branchlabels = branchlabels\n self.radial = radial\n self.highlighted = set()\n self.divs = []\n pars = SubplotParams(\n left=0, right=1, bottom=0.05, top=1, wspace=0.04\n )\n fig = pyplot.figure(subplotpars=pars)\n connect_events(fig.canvas)\n self.figure = fig\n\n for x in 
trees or []:\n self.add(x, support=support, scaled=scaled,\n branchlabels=branchlabels)\n\n def on_nodes_selected(self, treeplot):\n pass\n\n def clear(self):\n self.root = []\n self.name2plot = {}\n self.highlighted = set()\n self.divs = []\n self.figure.clf()\n\n def picked(self, e):\n try:\n if e.mouseevent.button==1:\n print(e.artist.get_text())\n sys.stdout.flush()\n except:\n pass\n\n def getplot(self, x):\n p = None\n try:\n i = self.root.index(x)\n return self.plot[i]\n except ValueError:\n return self.name2plot.get(x)\n\n def add(self, data, name=None, support=70, scaled=True,\n branchlabels=False, leaflabels=True, mark_named=True):\n root = None\n if isinstance(data, tree.Node):\n root = data\n else:\n root = tree.read(data)\n if not root:\n raise IOError(\"cannot coerce data into tree.Node\")\n\n name = name or root.treename\n self.root.append(root)\n\n fig = self.figure\n N = len(self.plot)+1\n for i, p in enumerate(self.plot):\n p.change_geometry(1, N, i+1)\n plt = TreePlot(fig, 1, N, N, app=self, name=name, support=support,\n scaled=scaled, branchlabels=branchlabels,\n leaflabels=leaflabels, mark_named=mark_named)\n p = fig.add_subplot(plt)\n p.set_root(root)\n p.plot_tree()\n p.index = N-1\n self.plot.append(p)\n if name:\n assert name not in self.name2plot\n self.name2plot[name] = p\n\n ## global IP\n ## if IP:\n ## def f(shell, s):\n ## self.highlight(s)\n ## return sorted([ x.label for x in self.highlighted ])\n ## IP.expose_magic(\"highlight\", f)\n ## def f(shell, s):\n ## self.root.ladderize()\n ## self.redraw()\n ## IP.expose_magic(\"ladderize\", f)\n ## def f(shell, s):\n ## self.show()\n ## IP.expose_magic(\"show\", f)\n ## def f(shell, s):\n ## self.redraw()\n ## IP.expose_magic(\"redraw\", f)\n return p\n\n def show(self):\n self.figure.show()\n\n def redraw(self):\n for p in self.plot:\n p.redraw()\n self.figure.canvas.draw_idle()\n\n def ladderize(self, reverse=False):\n for n in self.root:\n n.ladderize(reverse)\n self.redraw()\n\n def 
highlight(self, s=None, add=False, width=5, color=\"red\"):\n \"\"\"\n Highlight nodes\n\n Args:\n s: Str or list of Strs or Node or list of Nodes\n add (bool): Whether to add to existing highlighted nodes or\n overwrite them.\n width (float): Width of highlighted lines. Defaults to 5\n color (str): Color of highlighted lines. Defaults to red\n \"\"\"\n if not s:\n self.highlighted = set()\n if not add:\n self.highlighted = set()\n\n nodesets = [ p.root.findall(s) for p in self.plot ]\n\n for nodes, p in zip(nodesets, self.plot):\n if nodes:\n p.highlight(nodes, width=width, color=color)\n else:\n p.highlight()\n\n self.highlighted = nodesets\n self.figure.canvas.draw_idle()\n\n ## for root in self.root:\n ## for node in root.iternodes():\n ## if node.label and (s in node.label):\n ## self.highlighted.add(node)\n ## self.highlight()\n\n def home(self):\n for p in self.plot: p.home()\n\n\ndef connect_events(canvas):\n mpl_connect = canvas.mpl_connect\n mpl_connect(\"button_press_event\", onclick)\n mpl_connect(\"button_release_event\", onbuttonrelease)\n mpl_connect(\"scroll_event\", onscroll)\n mpl_connect(\"pick_event\", onpick)\n mpl_connect(\"motion_notify_event\", ondrag)\n mpl_connect(\"key_press_event\", onkeypress)\n mpl_connect(\"axes_enter_event\", axes_enter)\n mpl_connect(\"axes_leave_event\", axes_leave)\n\nclass UpdatingRect(Rectangle):\n def __call__(self, p):\n self.set_bounds(*p.viewLim.bounds)\n p.figure.canvas.draw_idle()\n\nclass Tree(Axes):\n \"\"\"\n matplotlib.axes.Axes subclass for rendering trees.\n \"\"\"\n def __init__(self, fig, rect, *args, **kwargs):\n self.root = None\n self.app = kwargs.pop(\"app\", None)\n self.support = kwargs.pop(\"support\", 70.0)\n self.scaled = kwargs.pop(\"scaled\", True)\n self.leaflabels = kwargs.pop(\"leaflabels\", True)\n self.branchlabels = kwargs.pop(\"branchlabels\", True)\n self._mark_named = kwargs.pop(\"mark_named\", True)\n self.name = None\n self.leaf_fontsize = kwargs.pop(\"leaf_fontsize\", 10)\n 
self.branch_fontsize = kwargs.pop(\"branch_fontsize\", 10)\n self.branch_width = kwargs.pop(\"branch_width\", 1)\n self.branch_color = kwargs.pop(\"branch_color\", \"black\")\n self.interactive = kwargs.pop(\"interactive\", True)\n self.decorators = kwargs.pop(\"decorators\", [])\n ## if self.decorators:\n ## print >> sys.stderr, \"got %s decorators\" % len(self.decorators)\n self.xoff = kwargs.pop(\"xoff\", 0)\n self.yoff = kwargs.pop(\"yoff\", 0)\n self.highlight_support = kwargs.pop(\"highlight_support\", True)\n self.smooth_xpos = kwargs.pop(\"smooth_xpos\", 0)\n Axes.__init__(self, fig, rect, *args, **kwargs)\n self.nleaves = 0\n self.highlighted = None\n self.highlightpatch = None\n self.pan_start = None\n if not self.decorators:\n self.decorators = [\n (\"__selected_nodes__\", (Tree.highlight_selected_nodes, [], {}))\n ]\n self.name2dec = dict([ (x[0], i) for i, x in\n enumerate(self.decorators) ])\n self._active = False\n\n if self.interactive:\n self.callbacks.connect(\"ylim_changed\", self.draw_labels)\n self.selector = RectangleSelector(self, self.rectselect,\n useblit=True)\n def f(e):\n if e.button != 1: return True\n else: return RectangleSelector.ignore(self.selector, e)\n self.selector.ignore = f\n self.xoffset_value = 0.05\n self.selected_nodes = set()\n self.leaf_offset = 4\n self.leaf_valign = \"center\"\n self.leaf_halign = \"left\"\n self.branch_offset = -5\n self.branch_valign = \"center\"\n self.branch_halign = \"right\"\n\n self.spines[\"top\"].set_visible(False)\n self.spines[\"left\"].set_visible(False)\n self.spines[\"right\"].set_visible(False)\n self.xaxis.set_ticks_position(\"bottom\")\n\n def p2y(self):\n \"Convert a single display point to y-units\"\n transform = self.transData.inverted().transform\n return transform([0,1])[1] - transform([0,0])[1]\n\n def p2x(self):\n \"Convert a single display point to y-units\"\n transform = self.transData.inverted().transform\n return transform([0,0])[1] - transform([1,0])[1]\n\n def 
decorate(self, func, *args, **kwargs):\n \"\"\"\n Decorate the tree with function *func*. If *kwargs* contains\n the key-value pair ('store', *name*), the decorator function\n is stored in self.decorators and called upon every redraw.\n \"\"\"\n name = kwargs.pop(\"store\", None)\n if name:\n if name in self.name2dec:\n i = self.name2dec[name]\n self.decorators[i] = (name, (func, args, kwargs))\n else:\n self.decorators.append((name, (func, args, kwargs)))\n self.name2dec = dict([ (x[0], i) for i, x in\n enumerate(self.decorators) ])\n\n func(self, *args, **kwargs)\n\n def rmdec(self, name):\n if name in self.name2dec:\n i = self.name2dec[name]\n del self.decorators[i]\n self.name2dec = dict([ (x[0], i) for i, x in\n enumerate(self.decorators) ])\n\n\n def flip(self):\n \"\"\"\n Reverse the direction of the x-axis.\n \"\"\"\n self.leaf_offset *= -1\n self.branch_offset *= -1\n ha = self.leaf_halign\n self.leaf_halign = \"right\" if ha == \"left\" else \"left\"\n ha = self.branch_halign\n self.branch_halign = \"right\" if ha == \"left\" else \"left\"\n self.invert_xaxis()\n self.redraw()\n\n def xoffset(self):\n \"\"\"Space below x axis to show tick labels.\"\"\"\n if self.scaled:\n return self.xoffset_value\n else:\n return 0\n\n def save_newick(self, filename):\n \"\"\"\n Save tree as a newick file.\n\n Args:\n filename (str): Path to file.\n\n \"\"\"\n if os.path.exists(filename):\n s = raw_input(\"File %s exists, enter 'y' to overwrite \").strip()\n if (s and s.lower() != 'y') or (not s):\n return\n import newick\n f = file(filename, \"w\")\n f.write(newick.string(self.root))\n f.close()\n\n def set_scaled(self, scaled):\n flag = self.scaled != scaled\n self.scaled = scaled\n return flag\n\n def cbar(self, nodes, color=None, label=None, x=None, width=8, xoff=10,\n showlabel=True, mrca=True):\n \"\"\"\n Draw a 'clade' bar (i.e., along the y-axis) indicating a\n clade. *nodes* are assumed to be one or more nodes in the\n tree. 
If just one, it should be the internal node\n representing the clade of interest; otherwise, the clade of\n interest is the most recent common ancestor of the specified\n nodes. *label* is an optional string to be drawn next to the\n bar, *offset* by the specified number of display units. If\n *label* is ``None`` then the clade's label is used instead.\n\n Args:\n nodes: Node or list of nodes\n color (str): Color of the bar. Optional, defaults to None.\n label (str): Optional label for bar. If None, the clade's\n label is used instead. Defaults to None.\n width (float): Width of bar\n xoff (float): Offset from label to bar\n showlabel (bool): Whether or not to draw the label\n mrca: RR: Not quite sure what this does -CZ\n\n \"\"\"\n xlim = self.get_xlim(); ylim = self.get_ylim()\n if color is None: color = _tango.next()\n transform = self.transData.inverted().transform\n\n if mrca:\n if isinstance(nodes, tree.Node):\n spec = nodes\n elif type(nodes) in types.StringTypes:\n spec = self.root.get(nodes)\n else:\n spec = self.root.mrca(nodes)\n\n assert spec in self.root\n label = label or spec.label\n leaves = spec.leaves()\n\n else:\n leaves = nodes\n\n n2c = self.n2c\n\n y = sorted([ n2c[n].y for n in leaves ])\n ymin = y[0]; ymax = y[-1]; y = (ymax+ymin)*0.5\n\n if x is None:\n x = max([ n2c[n].x for n in leaves ])\n _x = 0\n for lf in leaves:\n txt = self.node2label.get(lf)\n if txt and txt.get_visible():\n _x = max(_x, transform(txt.get_window_extent())[1,0])\n if _x > x: x = _x\n\n v = sorted(list(transform(((0,0),(xoff,0)))[:,0]))\n xoff = v[1]-v[0]\n x += xoff\n\n Axes.plot(self, [x,x], [ymin, ymax], '-', linewidth=width, color=color)\n\n if showlabel and label:\n xo = self.leaf_offset\n if xo > 0:\n xo += width*0.5\n else:\n xo -= width*0.5\n txt = self.annotate(\n label,\n xy=(x, y),\n xytext=(xo, 0),\n textcoords=\"offset points\",\n verticalalignment=self.leaf_valign,\n horizontalalignment=self.leaf_halign,\n fontsize=self.leaf_fontsize,\n clip_on=True,\n 
picker=False\n )\n\n self.set_xlim(xlim); self.set_ylim(ylim)\n\n def anctrace(self, anc, descendants=None, width=4, color=\"blue\"):\n \"\"\"\n RR: This function gives me a 'list index out of range' error\n when I try to use it -CZ\n \"\"\"\n if not descendants:\n descendants = anc.leaves()\n else:\n for d in descendants:\n assert d in anc\n\n nodes = []\n for d in descendants:\n v = d.rootpath(anc)\n if v:\n nodes.extend(v)\n nodes = set(nodes)\n nodes.remove(anc)\n self.trace_branches(nodes, width, color)\n\n def trace_branches(self, nodes, width=4, color=\"blue\"):\n n2c = self.n2c\n M = Path.MOVETO; L = Path.LINETO\n verts = []\n codes = []\n for c, pc in [ (n2c[x], n2c[x.parent]) for x in nodes\n if (x in n2c) and x.parent ]:\n x = c.x; y = c.y\n px = pc.x; py = pc.y\n verts.append((x, y)); codes.append(M)\n verts.append((px, y)); codes.append(L)\n verts.append((px, py)); codes.append(L)\n px, py = verts[-1]\n verts.append((px, py)); codes.append(M)\n\n p = PathPatch(Path(verts, codes), fill=False,\n linewidth=width, edgecolor=color)\n self.add_patch(p)\n self.figure.canvas.draw_idle()\n return p\n\n def highlight_selected_nodes(self, color=\"green\"):\n xlim = self.get_xlim()\n ylim = self.get_ylim()\n get = self.n2c.get\n coords = list(filter(None, [ get(n) for n in self.selected_nodes ]))\n x = [ c.x for c in coords ]\n y = [ c.y for c in coords ]\n if x and y:\n self.__selected_highlight_patch = self.scatter(x, y, s=60, c=color,\n zorder=100)\n self.set_xlim(xlim)\n self.set_ylim(ylim)\n self.figure.canvas.draw_idle()\n\n def select_nodes(self, nodes=None, add=False):\n try:\n self.__selected_highlight_patch.remove()\n self.figure.canvas.draw_idle()\n except:\n pass\n if add:\n if nodes:\n self.selected_nodes = self.selected_nodes | nodes\n if hasattr(self, \"app\") and self.app:\n self.app.on_nodes_selected(self)\n self.highlight_selected_nodes()\n else:\n if nodes:\n self.selected_nodes = nodes\n if hasattr(self, \"app\") and self.app:\n 
self.app.on_nodes_selected(self)\n self.highlight_selected_nodes()\n else:\n self.selected_nodes = set()\n\n def rectselect(self, e0, e1):\n xlim = self.get_xlim()\n ylim = self.get_ylim()\n s = set()\n x0, x1 = sorted((e0.xdata, e1.xdata))\n y0, y1 = sorted((e0.ydata, e1.ydata))\n add = e0.key == 'shift'\n for n, c in self.n2c.items():\n if (x0 < c.x < x1) and (y0 < c.y < y1):\n s.add(n)\n self.select_nodes(nodes = s, add = add)\n self.set_xlim(xlim)\n self.set_ylim(ylim)\n ## if s:\n ## print \"Selected:\"\n ## for n in s:\n ## print \" \", n\n\n def picked(self, e):\n if hasattr(self, \"app\") and self.app:\n self.app.picked(e)\n\n def window2data(self, expandx=1.0, expandy=1.0):\n \"\"\"\n return the data coordinates ((x0, y0),(x1, y1)) of the plot\n window, expanded by relative units of window size\n \"\"\"\n bb = self.get_window_extent()\n bbx = bb.expanded(expandx, expandy)\n return self.transData.inverted().transform(bbx.get_points())\n\n def get_visible_nodes(self, labeled_only=False):\n ## transform = self.transData.inverted().transform\n ## bb = self.get_window_extent()\n ## bbx = bb.expanded(1.1,1.1)\n ## ((x0, y0),(x1, y1)) = transform(bbx.get_points())\n ((x0, y0),(x1, y1)) = self.window2data(1.1, 1.1)\n #print \"visible_nodes points\", x0, x1, y0, y1\n\n if labeled_only:\n def f(v): return (y0 < v[0] < y1) and (v[2] in self.node2label)\n else:\n def f(v): return (y0 < v[0] < y1)\n for y, x, n in filter(f, self.coords):\n yield (n, x, y)\n\n def zoom_cxy(self, x=0.1, y=0.1, cx=None, cy=None):\n \"\"\"\n Zoom the x and y axes in by the specified proportion of the\n current view, with a fixed data point (cx, cy)\n \"\"\"\n transform = self.transData.inverted().transform\n xlim = self.get_xlim(); xmid = sum(xlim)*0.5\n ylim = self.get_ylim(); ymid = sum(ylim)*0.5\n bb = self.get_window_extent()\n bbx = bb.expanded(1.0-x,1.0-y)\n points = transform(bbx.get_points())\n x0, x1 = points[:,0]; y0, y1 = points[:,1]\n deltax = xmid-x0; deltay = ymid-y0\n cx = 
cx or xmid; cy = cy or ymid\n xoff = (cx-xmid)*x\n self.set_xlim(xmid-deltax+xoff, xmid+deltax+xoff)\n yoff = (cy-ymid)*y\n self.set_ylim(ymid-deltay+yoff, ymid+deltay+yoff)\n self.adjust_xspine()\n\n def zoom(self, x=0.1, y=0.1, cx=None, cy=None):\n \"\"\"\n Zoom the x and y axes in by the specified proportion of the\n current view.\n \"\"\"\n # get the function to convert display coordinates to data\n # coordinates\n transform = self.transData.inverted().transform\n xlim = self.get_xlim()\n ylim = self.get_ylim()\n bb = self.get_window_extent()\n bbx = bb.expanded(1.0-x,1.0-y)\n points = transform(bbx.get_points())\n x0, x1 = points[:,0]; y0, y1 = points[:,1]\n deltax = x0 - xlim[0]; deltay = y0 - ylim[0]\n self.set_xlim(xlim[0]+deltax, xlim[1]-deltax)\n self.set_ylim(ylim[0]+deltay, ylim[1]-deltay)\n self.adjust_xspine()\n\n def center_y(self, y):\n \"\"\"\n Center the y-axis of the canvas on the given y value\n \"\"\"\n ymin, ymax = self.get_ylim()\n yoff = (ymax - ymin) * 0.5\n self.set_ylim(y-yoff, y+yoff)\n self.adjust_xspine()\n\n def center_x(self, x, offset=0.3):\n \"\"\"\n Center the x-axis of the canvas on the given x value\n \"\"\"\n xmin, xmax = self.get_xlim()\n xspan = xmax - xmin\n xoff = xspan*0.5 + xspan*offset\n self.set_xlim(x-xoff, x+xoff)\n self.adjust_xspine()\n\n def center_node(self, node):\n \"\"\"\n Center the canvas on the given node\n \"\"\"\n c = self.n2c[node]\n y = c.y\n self.center_y(y)\n x = c.x\n self.center_x(x, 0.2)\n\n def do_highlight_support(self):\n \"\"\"\n TODO: reconfigure this, insert into self.decorators\n \"\"\"\n if self.support:\n lim = float(self.support)\n\n M = Path.MOVETO; L = Path.LINETO\n\n verts = []; codes = []\n segments = []\n def f(n):\n if n.isleaf or not n.parent: return False\n try: return float(n.label) >= lim\n except:\n try: return float(n.support) >= lim\n except: pass\n return False\n\n for node, coords in [ x for x in self.n2c.items() if f(x[0]) ]:\n x = coords.x; y = coords.y\n p = node.parent\n 
pcoords = self.n2c[p]\n px = pcoords.x; py = y\n if self.app and self.app.radial:\n pc = self.n2c[node.parent]; theta2 = pc.angle\n px = math.cos(math.radians(coords.angle))*pc.depth\n py = math.sin(math.radians(coords.angle))*pc.depth\n\n ## segments.append([(x, y),(px, y)])\n verts.append((x,y)); codes.append(M)\n verts.append((px,py)); codes.append(L)\n\n if verts:\n patch = PathPatch(Path(verts, codes), fill=False,\n linewidth=3, edgecolor='black')\n self.add_patch(patch)\n\n ## self.add_artist(Line2D(\n ## [x,px], [y,py], lw=3, solid_capstyle=\"butt\", color=\"black\"\n ## ))\n\n def hl(self, s):\n nodes = self.root.findall(s)\n if nodes:\n self.highlight(nodes)\n\n def hlines(self, nodes, width=5, color=\"red\", xoff=0, yoff=0):\n offset = IdentityTransform()\n segs = []; w = []; o = []\n for n in filter(lambda x:x.parent, nodes):\n c = self.n2c[n]; p = self.n2c[n.parent]\n segs.append(((p.x,c.y),(c.x,c.y)))\n w.append(width); o.append((xoff,yoff))\n lc = LineCollection(segs, linewidths=w, transOffset=offset, offsets=o)\n lc.set_color(color)\n Axes.add_collection(self, lc)\n ## self.drawstack.append((\"hlines\", [nodes], dict(width=width,\n ## color=color,\n ## xoff=xoff,\n ## yoff=yoff)))\n self.figure.canvas.draw_idle()\n return lc\n\n def hardcopy(self, relwidth=0.5, leafpad=1.5):\n p = HC.TreeFigure(self.root, relwidth=relwidth, leafpad=leafpad,\n name=self.name, support=self.support,\n leaf_fontsize=self.leaf_fontsize,\n branch_fontsize=self.branch_fontsize,\n branch_width=self.branch_width,\n branch_color=self.branch_color,\n highlight_support=self.highlight_support,\n branchlabels=self.branchlabels,\n decorators=self.decorators,\n leaflabels=self.leaflabels,\n mark_named=self._mark_named,\n xlim=self.get_xlim(),\n ylim=self.get_ylim())\n return p\n\n def highlight(self, nodes=None, width=5, color=\"red\"):\n if self.highlightpatch:\n try:\n self.highlightpatch.remove()\n except:\n pass\n if not nodes:\n return\n\n if len(nodes)>1:\n mrca = 
self.root.mrca(nodes)\n if not mrca:\n return\n else:\n mrca = list(nodes)[0]\n\n M = Path.MOVETO; L = Path.LINETO\n verts = []\n codes = []\n seen = set()\n for node, coords in [ x for x in self.n2c.items() if x[0] in nodes ]:\n x = coords.x; y = coords.y\n p = node.parent\n while p:\n pcoords = self.n2c[p]\n px = pcoords.x; py = pcoords.y\n if node not in seen:\n verts.append((x, y)); codes.append(M)\n verts.append((px, y)); codes.append(L)\n verts.append((px, py)); codes.append(L)\n seen.add(node)\n if p == mrca or node == mrca:\n break\n node = p\n coords = self.n2c[node]\n x = coords.x; y = coords.y\n p = node.parent\n px, py = verts[-1]\n verts.append((px, py)); codes.append(M)\n\n self.highlightpath = Path(verts, codes)\n self.highlightpatch = PathPatch(\n self.highlightpath, fill=False, linewidth=width, edgecolor=color,\n capstyle='round', joinstyle='round'\n )\n return self.add_patch(self.highlightpatch)\n\n def find(self, s):\n \"\"\"\n Find node(s) matching pattern s and zoom to node(s)\n \"\"\"\n nodes = list(self.root.find(s))\n if nodes:\n self.zoom_nodes(nodes)\n\n def zoom_nodes(self, nodes, border=1.2):\n y0, y1 = self.get_ylim(); x0, x1 = self.get_xlim()\n y0 = max(0, y0); y1 = min(1, y1)\n\n n2c = self.n2c\n v = [ n2c[n] for n in nodes ]\n ymin = min([ c.y for c in v ])\n ymax = max([ c.y for c in v ])\n xmin = min([ c.x for c in v ])\n xmax = max([ c.x for c in v ])\n bb = Bbox(((xmin,ymin), (xmax, ymax)))\n\n # convert data coordinates to display coordinates\n transform = self.transData.transform\n disp_bb = [Bbox(transform(bb))]\n for n in nodes:\n if n.isleaf:\n txt = self.node2label[n]\n if txt.get_visible():\n disp_bb.append(txt.get_window_extent())\n\n disp_bb = Bbox.union(disp_bb).expanded(border, border)\n\n # convert back to data coordinates\n points = self.transData.inverted().transform(disp_bb)\n x0, x1 = points[:,0]\n y0, y1 = points[:,1]\n self.set_xlim(x0, x1)\n self.set_ylim(y0, y1)\n\n def zoom_clade(self, anc, border=1.2):\n if 
anc.isleaf:\n self.center_node(anc)\n\n else:\n self.zoom_nodes(list(anc), border)\n\n def draw_leaf_labels(self, *args):\n leaves = list(filter(lambda x:x[0].isleaf,\n self.get_visible_nodes(labeled_only=True)))\n psep = self.leaf_pixelsep()\n fontsize = min(self.leaf_fontsize, max(psep, 8))\n n2l = self.node2label\n transform = self.transData.transform\n sub = operator.sub\n\n for n in leaves:\n n2l[n[0]].set_visible(False)\n\n # draw leaves\n leaves_drawn = []\n for n, x, y in leaves:\n txt = self.node2label[n]\n if not leaves_drawn:\n txt.set_visible(True)\n leaves_drawn.append(txt)\n self.figure.canvas.draw_idle()\n continue\n\n txt2 = leaves_drawn[-1]\n y0 = y; y1 = txt2.xy[1]\n sep = sub(*transform(([0,y0],[0,y1]))[:,1])\n if sep > fontsize:\n txt.set_visible(True)\n txt.set_size(fontsize)\n leaves_drawn.append(txt)\n self.figure.canvas.draw_idle()\n\n if leaves_drawn:\n leaves_drawn[0].set_size(fontsize)\n\n return fontsize\n\n def draw_labels(self, *args):\n fs = max(10, self.draw_leaf_labels())\n nodes = self.get_visible_nodes(labeled_only=True)\n ## print [ x[0].id for x in nodes ]\n branches = list(filter(lambda x:(not x[0].isleaf), nodes))\n n2l = self.node2label\n for n, x, y in branches:\n t = n2l[n]\n t.set_visible(True)\n t.set_size(fs)\n\n def unclutter(self, *args):\n nodes = self.get_visible_nodes(labeled_only=True)\n branches = list(filter(lambda x:(not x[0].isleaf), nodes))\n psep = self.leaf_pixelsep()\n n2l = self.node2label\n fontsize = min(self.leaf_fontsize*1.2, max(psep, self.leaf_fontsize))\n\n drawn = []\n for n, x, y in branches:\n txt = n2l[n]\n try:\n bb = txt.get_window_extent().expanded(2, 2)\n vis = True\n for n2 in reversed(drawn):\n txt2 = n2l[n2]\n if bb.overlaps(txt2.get_window_extent()):\n txt.set_visible(False)\n vis = False\n self.figure.canvas.draw_idle()\n break\n if vis:\n txt.set_visible(True)\n txt.set_size(fontsize)\n self.figure.canvas.draw_idle()\n drawn.append(n)\n except RuntimeError:\n pass\n ## 
txt.set_visible(True)\n ## txt.set_size(fontsize)\n ## drawn.append(n)\n ## self.figure.canvas.draw_idle()\n\n def leaf_pixelsep(self):\n y0, y1 = self.get_ylim()\n y0 = max(0, y0)\n y1 = min(self.nleaves, y1)\n display_points = self.transData.transform(((0, y0), (0, y1)))\n # height in pixels (visible y data extent)\n height = operator.sub(*reversed(display_points[:,1]))\n pixelsep = height/((y1-y0)/self.leaf_hsep)\n return pixelsep\n\n def ypp(self):\n y0, y1 = self.get_ylim()\n p0, p1 = self.transData.transform(((0, y0), (0, y1)))[:,1]\n return (y1-y0)/float(p1-p0)\n\n def draw_labels_old(self, *args):\n if self.nleaves:\n y0, y1 = self.get_ylim()\n y0 = max(0, y0); y1 = min(1, y1)\n\n display_points = self.transData.transform(((0, y0), (0, y1)))\n # height in pixels (visible y data extent)\n height = operator.sub(*reversed(display_points[:,1]))\n pixelsep = height/((y1-y0)/self.leaf_hsep)\n fontsize = min(max(pixelsep-2, 8), 12)\n\n if pixelsep >= 8:\n for node, txt in self.node2label.items():\n if node.isleaf:\n if self.leaflabels:\n c = self.n2c[node]\n x = c.x; y = c.y\n if (y0 < y < y1):\n txt.set_size(fontsize)\n txt.set_visible(True)\n else:\n if self.branchlabels:\n c = self.n2c[node]\n x = c.x; y = c.y\n if (y0 < y < y1):\n txt.set_size(fontsize)\n txt.set_visible(True)\n elif pixelsep >= 4:\n for node, txt in self.node2label.items():\n if node.isleaf:\n txt.set_visible(False)\n else:\n if self.branchlabels:\n c = self.n2c[node]\n x = c.x; y = c.y\n if (y0 < y < y1):\n txt.set_size(fontsize)\n txt.set_visible(True)\n else:\n for node, txt in self.node2label.items():\n txt.set_visible(False)\n self.figure.canvas.draw_idle()\n\n def redraw(self, home=False, layout=True):\n \"\"\"\n Replot the tree\n \"\"\"\n xlim = self.get_xlim()\n ylim = self.get_ylim()\n self.cla()\n if layout:\n self.layout()\n self.plot_tree()\n if self.interactive:\n self.callbacks.connect(\"ylim_changed\", self.draw_labels)\n\n if home:\n self.home()\n else:\n 
self.set_xlim(*xlim)\n self.set_ylim(*ylim)\n\n def set_name(self, name):\n self.name = name\n if name:\n at = AnchoredText(\n self.name, loc=2, frameon=True,\n prop=dict(size=12, weight=\"bold\")\n )\n at.patch.set_linewidth(0)\n at.patch.set_facecolor(\"white\")\n at.patch.set_alpha(0.6)\n self.add_artist(at)\n return at\n\n def _path_to_parent(self, node):\n \"\"\"\n For use in drawing branches\n \"\"\"\n c = self.n2c[node]; x = c.x; y = c.y\n pc = self.n2c[node.parent]; px = pc.x; py = pc.y\n M = Path.MOVETO; L = Path.LINETO\n verts = [(x, y), (px, y), (px, py)]\n codes = [M, L, L]\n return verts, codes\n ## return [PathPatch(Path(verts, codes), fill=False,\n ## linewidth=width or self.branch_width,\n ## edgecolor=color or self.branch_color)]\n\n\n def layout(self):\n self.n2c = cartesian(self.root, scaled=self.scaled, yunit=1.0,\n smooth=self.smooth_xpos)\n for c in self.n2c.values():\n c.x += self.xoff; c.y += self.yoff\n sv = sorted([\n [c.y, c.x, n] for n, c in self.n2c.items()\n ])\n self.coords = sv#numpy.array(sv)\n ## n2c = self.n2c\n ## self.node2linesegs = {}\n ## for node, coords in n2c.items():\n ## x = coords.x; y = coords.y\n ## v = [(x,y)]\n ## if node.parent:\n ## pcoords = n2c[node.parent]\n ## px = pcoords.x; py = pcoords.y\n ## v.append((px,y))\n ## v.append((px,py))\n ## self.node2linesegs[node] = v\n\n def set_root(self, root):\n self.root = root\n self.leaves = root.leaves()\n self.nleaves = len(self.leaves)\n self.leaf_hsep = 1.0#/float(self.nleaves)\n\n for n in root.descendants():\n if n.length is None:\n self.scaled=False; break\n self.layout()\n\n def plot_tree(self, root=None, **kwargs):\n \"\"\"\n Draw branches and labels\n \"\"\"\n if root and not self.root:\n self.set_root(root)\n\n if self.interactive: pyplot.ioff()\n\n if \"branchlabels\" in kwargs:\n self.branchlabels = kwargs[\"branchlabels\"]\n if \"leaflabels\" in kwargs:\n self.leaflabels = kwargs[\"leaflabels\"]\n self.yaxis.set_visible(False)\n 
self.create_branch_artists()\n self.create_label_artists()\n if self.highlight_support:\n self.do_highlight_support()\n self.mark_named()\n ## self.home()\n\n for k, v in self.decorators:\n func, args, kwargs = v\n func(self, *args, **kwargs)\n\n self.set_name(self.name)\n self.adjust_xspine()\n\n if self.interactive: pyplot.ion()\n\n labels = [ x.label for x in self.root.leaves() ]\n def fmt(x, pos=None):\n if x<0: return \"\"\n try: return labels[int(round(x))]\n except: pass\n return \"\"\n #self.yaxis.set_major_formatter(FuncFormatter(fmt))\n\n return self\n\n def clade_dimensions(self):\n n2c = self.n2c\n d = {}\n def recurse(n, n2c, d):\n v = []\n for c in n.children:\n recurse(c, n2c, d)\n if c.isleaf:\n x, y = n2c[c].point()\n x0 = x1 = x; y0 = y1 = y\n else:\n x0, x1, y0, y1 = d[c]\n v.append((x0, x1, y0, y1))\n if v:\n x0 = n2c[n].x\n x1 = max([ x[1] for x in v ])\n y0 = min([ x[2] for x in v ])\n y1 = max([ x[3] for x in v ])\n d[n] = (x0, x1, y0, y1)\n recurse(self.root, n2c, d)\n return d\n\n def clade_height_pixels(self):\n ypp = self.ypp()\n d = self.clade_dimensions()\n h = {}\n for n, (x0, x1, y0, y1) in d.items():\n h[n] = (y1-y0)/ypp\n return h\n\n def _decimate_nodes(self, n=500):\n leaves = self.leaves\n nleaves = len(leaves)\n if nleaves > n:\n indices = numpy.linspace(0, nleaves-1, n).astype(int)\n leaves = [ leaves[i] for i in indices ]\n return set(list(chain.from_iterable([ list(x.rootpath())\n for x in leaves ])))\n else:\n return self.root\n\n def create_branch_artists(self):\n \"\"\"\n Use MPL Paths to draw branches\n \"\"\"\n ## patches = []\n verts = []; codes = []\n for node in self.root.descendants():\n v, c = self._path_to_parent(node)\n verts.extend(v); codes.extend(c)\n self.branchpatch = PathPatch(\n Path(verts, codes), fill=False,\n linewidth=self.branch_width,\n edgecolor=self.branch_color\n )\n self.add_patch(self.branchpatch)\n ## for node in self._decimate_nodes():\n ## if node.parent:\n ## for p in 
self._path_to_parent(node):\n ## patches.append(p)\n ## self.branch_patches = PatchCollection(patches, match_original=True)\n ## self.add_collection(self.branch_patches)\n\n ## print \"enter: create_branch_artists\"\n ## self.node2branch = {}\n ## for node, segs in self.node2linesegs.items():\n ## line = Line2D(\n ## [x[0] for x in segs], [x[1] for x in segs],\n ## lw=self.branch_width, color=self.branch_color\n ## )\n ## line.set_visible(False)\n ## Axes.add_artist(self, line)\n ## self.node2branch[node] = line\n\n ## d = self.node2linesegs\n ## segs = [ d[n] for n in self.root if (n in d) ]\n\n ## dims = self.clade_dimensions(); ypp = self.ypp()\n ## def recurse(n, dims, clades, terminals):\n ## stop = False\n ## h = None\n ## v = dims.get(n)\n ## if v: h = (v[3]-v[2])/ypp\n ## if (h and (h < 20)) or (not h):\n ## stop = True\n ## terminals.append(n)\n ## if not stop:\n ## clades.append(n)\n ## for c in n.children:\n ## recurse(c, dims, clades, terminals)\n ## clades = []; terminals = []\n ## recurse(self.root, dims, clades, terminals)\n ## segs = [ d[n] for n in self.root if (n in d) and (n in clades) ]\n ## for t in terminals:\n ## if t.isleaf:\n ## segs.append(d[t])\n ## else:\n ## x0, x1, y0, y1 = dims[t]\n ## x, y = self.n2c[t].point()\n ## px, py = self.n2c[t.parent].point()\n ## segs.append(((px,py), (px,y), (x,y), (x1, y0), (x1,y1), (x,y)))\n\n ## lc = LineCollection(segs, linewidths=self.branch_width,\n ## colors = self.branch_color)\n ## self.branches_linecollection = Axes.add_collection(self, lc)\n ## print \"leave: create_branch_artists\"\n\n def create_label_artists(self):\n ## print \"enter: create_label_artists\"\n self.node2label = {}\n n2c = self.n2c\n for node, coords in n2c.items():\n x = coords.x; y = coords.y\n if node.isleaf and node.label and self.leaflabels:\n txt = self.annotate(\n node.label,\n xy=(x, y),\n xytext=(self.leaf_offset, 0),\n textcoords=\"offset points\",\n verticalalignment=self.leaf_valign,\n 
horizontalalignment=self.leaf_halign,\n fontsize=self.leaf_fontsize,\n clip_on=True,\n picker=True\n )\n txt.node = node\n txt.set_visible(False)\n self.node2label[node] = txt\n\n if (not node.isleaf) and node.label and self.branchlabels:\n txt = self.annotate(\n node.label,\n xy=(x, y),\n xytext=(self.branch_offset,0),\n textcoords=\"offset points\",\n verticalalignment=self.branch_valign,\n horizontalalignment=self.branch_halign,\n fontsize=self.branch_fontsize,\n bbox=dict(fc=\"lightyellow\", ec=\"none\", alpha=0.8),\n clip_on=True,\n picker=True\n )\n ## txt.set_visible(False)\n txt.node = node\n self.node2label[node] = txt\n ## print \"leave: create_label_artists\"\n\n def adjust_xspine(self):\n v = sorted([ c.x for c in self.n2c.values() ])\n try:\n self.spines[\"bottom\"].set_bounds(v[0],v[-1])\n except AttributeError:\n pass\n for t,n,s in self.xaxis.iter_ticks():\n if (n > v[-1]) or (n < v[0]):\n t.set_visible(False)\n\n def mark_named(self):\n if self._mark_named:\n n2c = self.n2c\n cv = [ c for n, c in n2c.items() if n.label and (not n.isleaf) ]\n x = [ c.x for c in cv ]\n y = [ c.y for c in cv ]\n if x and y:\n self.scatter(x, y, s=5, color='black')\n\n def home(self):\n td = self.transData\n trans = td.inverted().transform\n xmax = xmin = ymax = ymin = 0\n if self.node2label:\n try:\n v = [ x.get_window_extent() for x in self.node2label.values()\n if x.get_visible() ]\n if v:\n xmax = trans((max([ x.xmax for x in v ]),0))[0]\n xmin = trans((min([ x.xmin for x in v ]),0))[0]\n except RuntimeError:\n pass\n\n v = self.n2c.values()\n ymin = min([ c.y for c in v ])\n ymax = max([ c.y for c in v ])\n xmin = min(xmin, min([ c.x for c in v ]))\n xmax = max(xmax, max([ c.x for c in v ]))\n xspan = xmax - xmin; xpad = xspan*0.05\n yspan = ymax - ymin; ypad = yspan*0.05\n self.set_xlim(xmin-xpad, xmax+xpad*2)\n self.set_ylim(ymin-ypad, ymax+ypad)\n self.adjust_xspine()\n\n def scroll(self, x, y):\n x0, x1 = self.get_xlim()\n y0, y1 = self.get_ylim()\n xd = 
(x1-x0)*x\n yd = (y1-y0)*y\n self.set_xlim(x0+xd, x1+xd)\n self.set_ylim(y0+yd, y1+yd)\n self.adjust_xspine()\n\n def plot_labelcolor(self, nodemap, state2color=None):\n if state2color is None:\n c = colors.tango()\n states = sorted(set(nodemap.values()))\n state2color = dict(zip(states, c))\n\n for node, txt in self.node2label.items():\n s = nodemap.get(node)\n if s is not None:\n c = state2color[s]\n if c:\n txt.set_color(c)\n self.figure.canvas.draw_idle()\n\n def node_image(self, node, imgfile, maxdim=100, border=0):\n xoff = self.leaf_offset\n n = self.root[node]; c = self.n2c[n]; p = (c.x, c.y)\n img = Image.open(imgfile)\n if max(img.size) > maxdim:\n img.thumbnail((maxdim, maxdim))\n imgbox = OffsetImage(img)\n xycoords = self.node2label.get(node) or \"data\"\n if xycoords != \"data\": p = (1, 0.5)\n abox = AnnotationBbox(imgbox, p,\n xybox=(xoff, 0.0),\n xycoords=xycoords,\n box_alignment=(0.0,0.5),\n pad=0.0,\n boxcoords=(\"offset points\"))\n self.add_artist(abox)\n\n def plot_discrete(self, data, cmap=None, name=None,\n xoff=10, yoff=0, size=15, legend=1):\n root = self.root\n if cmap is None:\n import ivy\n c = colors.tango()\n states = sorted(set(data.values()))\n cmap = dict(zip(states, c))\n n2c = self.n2c\n points = []; c = []\n d = dict([ (n, data.get(n)) for n in root if data.get(n) is not None ])\n for n, v in d.items():\n coord = n2c[n]\n points.append((coord.x, coord.y)); c.append(cmap[v])\n\n boxes = symbols.squares(self, points, c, size, xoff=xoff, yoff=yoff)\n\n if legend:\n handles = []; labels = []\n for v, c in sorted(cmap.items()):\n handles.append(Rectangle((0,0),0.5,1,fc=c))\n labels.append(str(v))\n self.legend(handles, labels, loc=legend)\n\n self.figure.canvas.draw_idle()\n return boxes\n\n def plot_continuous(self, data, mid=None, name=None, cmap=None,\n size=15, colorbar=True):\n area = (size*0.5)*(size*0.5)*numpy.pi\n values = data.values()\n vmin = min(values); vmax = max(values)\n if mid is None:\n mid = (vmin+vmax)*0.5\n 
delta = vmax-vmin*0.5\n else:\n delta = max(abs(vmax-mid), abs(vmin-mid))\n norm = mpl_colors.Normalize(mid-delta, mid+delta)\n ## if cmap is None: cmap = mpl_colormap.binary\n if cmap is None: cmap = mpl_colormap.hot\n n2c = self.n2c\n X = numpy.array(\n [ (n2c[n].x, n2c[n].y, v) for n, v in data.items() if n in n2c ]\n )\n circles = self.scatter(\n X[:,0], X[:,1], s=area, c=X[:,2], cmap=cmap, norm=norm,\n zorder=1000\n )\n if colorbar:\n cbar = self.figure.colorbar(circles, ax=self, shrink=0.7)\n if name:\n cbar.ax.set_xlabel(name)\n\n self.figure.canvas.draw_idle()\n\nclass RadialTree(Tree):\n def layout(self):\n from ..layout_polar import calc_node_positions\n start = self.start if hasattr(self, 'start') else 0\n end = self.end if hasattr(self, 'end') else None\n self.n2c = calc_node_positions(self.root, scaled=self.scaled,\n start=start, end=end)\n sv = sorted([\n [c.y, c.x, n] for n, c in self.n2c.items()\n ])\n self.coords = sv\n\n ## def _path_to_parent(self, node, width=None, color=None):\n ## c = self.n2c[node]; theta1 = c.angle; r = c.depth\n ## M = Path.MOVETO; L = Path.LINETO\n ## pc = self.n2c[node.parent]; theta2 = pc.angle\n ## px1 = math.cos(math.radians(c.angle))*pc.depth\n ## py1 = math.sin(math.radians(c.angle))*pc.depth\n ## verts = [(c.x,c.y),(px1,py1)]; codes = [M,L]\n ## #verts.append((pc.x,pc.y)); codes.append(L)\n ## path = PathPatch(Path(verts, codes), fill=False,\n ## linewidth=width or self.branch_width,\n ## edgecolor=color or self.branch_color)\n ## diam = pc.depth*2\n ## t1, t2 = tuple(sorted((theta1,theta2)))\n ## arc = Arc((0,0), diam, diam, theta1=t1, theta2=t2,\n ## edgecolor=color or self.branch_color,\n ## linewidth=width or self.branch_width)\n ## return [path, arc]\n\n def _path_to_parent(self, node):\n c = self.n2c[node]; theta1 = c.angle; r = c.depth\n M = Path.MOVETO; L = Path.LINETO\n pc = self.n2c[node.parent]; theta2 = pc.angle\n px1 = math.cos(math.radians(c.angle))*pc.depth\n py1 = 
math.sin(math.radians(c.angle))*pc.depth\n verts = [(c.x,c.y),(px1,py1)]; codes = [M,L]\n t1, t2 = tuple(sorted((theta1,theta2)))\n diam = pc.depth*2\n arc = Arc((0,0), diam, diam, theta1=t1, theta2=t2)\n arcpath = arc.get_path()\n av = arcpath.vertices * pc.depth\n ac = arcpath.codes\n verts.extend(av.tolist())\n codes.extend(ac.tolist())\n return verts, codes\n\n def highlight(self, nodes=None, width=5, color=\"red\"):\n if self.highlightpatch:\n try:\n self.highlightpatch.remove()\n except:\n pass\n if not nodes:\n return\n\n if len(nodes)>1:\n mrca = self.root.mrca(nodes)\n if not mrca:\n return\n else:\n mrca = list(nodes)[0]\n\n M = Path.MOVETO; L = Path.LINETO\n verts = []\n codes = []\n seen = set()\n patches = []\n for node, coords in [ x for x in self.n2c.items() if x[0] in nodes ]:\n x = coords.x; y = coords.y\n p = node.parent\n while p:\n pcoords = self.n2c[p]\n px = pcoords.x; py = pcoords.y\n if node not in seen:\n v, c = self._path_to_parent(node)\n verts.extend(v)\n codes.extend(c)\n seen.add(node)\n if p == mrca or node == mrca:\n break\n node = p\n coords = self.n2c[node]\n x = coords.x; y = coords.y\n p = node.parent\n ## px, py = verts[-1]\n ## verts.append((px, py)); codes.append(M)\n self.highlightpath = Path(verts, codes)\n self.highlightpatch = PathPatch(\n self.highlightpath, fill=False, linewidth=width, edgecolor=color\n )\n self.add_patch(self.highlightpatch)\n ## self.highlight_patches = PatchCollection(patches, match_original=True)\n ## self.add_collection(self.highlight_patches)\n\n\nclass OverviewTree(Tree):\n def __init__(self, *args, **kwargs):\n kwargs[\"leaflabels\"] = False\n kwargs[\"branchlabels\"] = False\n Tree.__init__(self, *args, **kwargs)\n self.xaxis.set_visible(False)\n self.spines[\"bottom\"].set_visible(False)\n self.add_overview_rect()\n\n def set_target(self, target):\n self.target = target\n\n def add_overview_rect(self):\n rect = UpdatingRect([0, 0], 0, 0, facecolor='black', edgecolor='red')\n 
rect.set_alpha(0.2)\n rect.target = self.target\n rect.set_bounds(*self.target.viewLim.bounds)\n self.zoomrect = rect\n self.add_patch(rect)\n ## if pyplot.isinteractive():\n self.target.callbacks.connect('xlim_changed', rect)\n self.target.callbacks.connect('ylim_changed', rect)\n\n def redraw(self):\n Tree.redraw(self)\n self.add_overview_rect()\n self.figure.canvas.draw_idle()\n\ndef axes_enter(e):\n ax = e.inaxes\n ax._active = True\n\ndef axes_leave(e):\n ax = e.inaxes\n ax._active = False\n\ndef onselect(estart, estop):\n b = estart.button\n ## print b, estart.key\n\ndef onkeypress(e):\n ax = e.inaxes\n k = e.key\n if ax and k == 't':\n ax.home()\n if ax and k == \"down\":\n ax.scroll(0, -0.1)\n ax.figure.canvas.draw_idle()\n if ax and k == \"up\":\n ax.scroll(0, 0.1)\n ax.figure.canvas.draw_idle()\n if ax and k == \"left\":\n ax.scroll(-0.1, 0)\n ax.figure.canvas.draw_idle()\n if ax and k == \"right\":\n ax.scroll(0.1, 0)\n ax.figure.canvas.draw_idle()\n if ax and k and k in '=+':\n ax.zoom(0.1,0.1)\n if ax and k == '-':\n ax.zoom(-0.1,-0.1)\n\ndef ondrag(e):\n ax = e.inaxes\n button = e.button\n if ax and button == 2:\n if not ax.pan_start:\n ax.pan_start = (e.xdata, e.ydata)\n return\n x, y = ax.pan_start\n xdelta = x - e.xdata\n ydelta = y - e.ydata\n x0, x1 = ax.get_xlim()\n xspan = x1-x0\n y0, y1 = ax.get_ylim()\n yspan = y1 - y0\n midx = (x1+x0)*0.5\n midy = (y1+y0)*0.5\n ax.set_xlim(midx+xdelta-xspan*0.5, midx+xdelta+xspan*0.5)\n ax.set_ylim(midy+ydelta-yspan*0.5, midy+ydelta+yspan*0.5)\n ax.adjust_xspine()\n\ndef onbuttonrelease(e):\n ax = e.inaxes\n button = e.button\n if button == 2:\n ## print \"pan end\"\n ax.pan_start = None\n\ndef onpick(e):\n ax = e.mouseevent.inaxes\n if ax:\n ax.picked(e)\n\ndef onscroll(e):\n ax = e.inaxes\n if ax:\n b = e.button\n ## print b\n k = e.key\n if k == None and b ==\"up\":\n ax.zoom(0.1,0.1)\n if k == None and b ==\"down\":\n ax.zoom(-0.1,-0.1)\n if k == \"shift\" and b == \"up\":\n ax.zoom_cxy(0.1, 0, e.xdata, 
e.ydata)\n if k == \"shift\" and b == \"down\":\n ax.zoom_cxy(-0.1, 0, e.xdata, e.ydata)\n if k == \"control\" and b == \"up\":\n ax.zoom_cxy(0, 0.1, e.xdata, e.ydata)\n if k == \"control\" and b == \"down\":\n ax.zoom_cxy(0, -0.1, e.xdata, e.ydata)\n if k == \"d\" and b == \"up\":\n ax.scroll(0, 0.1)\n if (k == \"d\" and b == \"down\"):\n ax.scroll(0, -0.1)\n if k == \"c\" and b == \"up\":\n ax.scroll(-0.1, 0)\n if k == \"c\" and b == \"down\":\n ax.scroll(0.1, 0)\n try: ax.adjust_xspine()\n except: pass\n ax.figure.canvas.draw_idle()\n\ndef onclick(e):\n ax = e.inaxes\n if ax and e.button==1 and hasattr(ax, \"zoomrect\") and ax.zoomrect:\n # overview clicked; reposition zoomrect\n r = ax.zoomrect\n x = e.xdata\n y = e.ydata\n arr = ax.transData.inverted().transform(r.get_extents())\n xoff = (arr[1][0]-arr[0][0])*0.5\n yoff = (arr[1][1]-arr[0][1])*0.5\n r.target.set_xlim(x-xoff,x+xoff)\n r.target.set_ylim(y-yoff,y+yoff)\n r(r.target)\n ax.figure.canvas.draw_idle()\n\n if ax and e.button==2:\n ## print \"pan start\", (e.xdata, e.ydata)\n ax.pan_start = (e.xdata, e.ydata)\n\n\ndef test_decorate(treeplot):\n import evolve\n data = evolve.brownian(treeplot.root)\n values = data.values()\n vmin = min(values); vmax = max(values)\n norm = mpl_colors.Normalize(vmin, vmax)\n cmap = mpl_colormap.binary\n n2c = treeplot.n2c\n X = numpy.array(\n [ (n2c[n].x, n2c[n].y, v)\n for n, v in data.items() if n in n2c ]\n )\n circles = treeplot.scatter(\n X[:,0], X[:,1], s=200, c=X[:,2], cmap=cmap, norm=norm,\n zorder=100\n )\n\nclass Decorator(object):\n def __init__(self, treeplot):\n self.plot = treeplot\n\nclass VisToggle(object):\n def __init__(self, name, treeplot=None, value=False):\n self.name = name\n self.plot = treeplot\n self.value = value\n\n def __nonzero__(self):\n return self.value\n\n def __repr__(self):\n return \"%s: %s\" % (self.name, self.value)\n\n def redraw(self):\n if self.plot:\n self.plot.redraw()\n\n def toggle(self):\n self.value = not self.value\n 
self.redraw()\n\n def show(self):\n if self.value == False:\n self.value = True\n self.redraw()\n\n def hide(self):\n if self.value == True:\n self.value = False\n self.redraw()\n\n\nTreePlot = subplot_class_factory(Tree)\nRadialTreePlot = subplot_class_factory(RadialTree)\nOverviewTreePlot = subplot_class_factory(OverviewTree)\n\nif __name__ == \"__main__\":\n import evolve\n root, data = evolve.test_brownian()\n plot_continuous(root, data, name=\"Brownian\", mid=0.0)\n"}}
-{"repo": "ewhauser/zookeeper", "pr_number": 37, "title": "Updated .Net library to .netstandard2.0", "state": "closed", "merged_at": null, "additions": 97, "deletions": 31594, "files_changed": ["src/dotnet/ZooKeeperNet.Recipes.Tests/Properties/AssemblyInfo.cs", "src/dotnet/ZooKeeperNet.Recipes/Properties/AssemblyInfo.cs", "src/dotnet/ZooKeeperNet.Tests/AbstractZooKeeperTests.cs", "src/dotnet/ZooKeeperNet.Tests/Properties/AssemblyInfo.cs", "src/dotnet/ZooKeeperNet.Tests/RecoveryTest.cs", "src/dotnet/ZooKeeperNet/Properties/AssemblyInfo.cs"], "files_before": {"src/dotnet/ZooKeeperNet.Recipes.Tests/Properties/AssemblyInfo.cs": "\ufeffusing System.Reflection;\r\nusing System.Runtime.CompilerServices;\r\nusing System.Runtime.InteropServices;\r\n\r\n// General Information about an assembly is controlled through the following \r\n// set of attributes. Change these attribute values to modify the information\r\n// associated with an assembly.\r\n[assembly: AssemblyTitle(\"SharpKeeperRecipes.Tests\")]\r\n[assembly: AssemblyDescription(\"\")]\r\n[assembly: AssemblyConfiguration(\"\")]\r\n[assembly: AssemblyCompany(\"Microsoft\")]\r\n[assembly: AssemblyProduct(\"SharpKeeperRecipes.Tests\")]\r\n[assembly: AssemblyCopyright(\"Copyright \u00a9 Microsoft 2010\")]\r\n[assembly: AssemblyTrademark(\"\")]\r\n[assembly: AssemblyCulture(\"\")]\r\n\r\n// Setting ComVisible to false makes the types in this assembly not visible \r\n// to COM components. 
If you need to access a type in this assembly from \r\n// COM, set the ComVisible attribute to true on that type.\r\n[assembly: ComVisible(false)]\r\n\r\n// The following GUID is for the ID of the typelib if this project is exposed to COM\r\n[assembly: Guid(\"a1b5ade2-64f4-46ee-b8eb-067c351afb45\")]\r\n\r\n// Version information for an assembly consists of the following four values:\r\n//\r\n// Major Version\r\n// Minor Version \r\n// Build Number\r\n// Revision\r\n//\r\n// You can specify all the values or you can default the Build and Revision Numbers \r\n// by using the '*' as shown below:\r\n// [assembly: AssemblyVersion(\"1.0.*\")]\r\n[assembly: AssemblyVersion(\"1.0.0.0\")]\r\n[assembly: AssemblyFileVersion(\"1.0.0.0\")]\r\n", "src/dotnet/ZooKeeperNet.Recipes/Properties/AssemblyInfo.cs": "\ufeffusing System.Reflection;\r\nusing System.Runtime.CompilerServices;\r\nusing System.Runtime.InteropServices;\r\n\r\n// General Information about an assembly is controlled through the following \r\n// set of attributes. Change these attribute values to modify the information\r\n// associated with an assembly.\r\n[assembly: AssemblyTitle(\"ZooKeeperNet.Recipes\")]\r\n[assembly: AssemblyConfiguration(\"\")]\r\n[assembly: AssemblyCulture(\"\")]\r\n// The following GUID is for the ID of the typelib if this project is exposed to COM\r\n[assembly: Guid(\"2fa24a05-e792-49c5-908a-9ae26ecb6401\")]", "src/dotnet/ZooKeeperNet.Tests/AbstractZooKeeperTests.cs": "/*\r\n * Licensed to the Apache Software Foundation (ASF) under one or more\r\n * contributor license agreements. See the NOTICE file distributed with\r\n * this work for additional information regarding copyright ownership.\r\n * The ASF licenses this file to You under the Apache License, Version 2.0\r\n * (the \"License\"); you may not use this file except in compliance with\r\n * the License. 
You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n *\r\n */\r\nnamespace ZooKeeperNet.Tests\r\n{\r\n using System;\r\n using System.Runtime.CompilerServices;\r\n using System.Threading;\r\n using log4net.Config;\r\n\r\n public abstract class AbstractZooKeeperTests\r\n {\r\n static AbstractZooKeeperTests()\r\n {\r\n XmlConfigurator.Configure(); \r\n }\r\n\r\n protected static readonly TimeSpan CONNECTION_TIMEOUT = new TimeSpan(0, 0, 0, 0, 10000);\r\n\r\n protected virtual ZooKeeper CreateClient()\r\n {\r\n CountdownWatcher watcher = new CountdownWatcher();\r\n return new ZooKeeper(\"127.0.0.1:2181\", new TimeSpan(0, 0, 0, 10000), watcher);\r\n }\r\n\r\n protected virtual ZooKeeper CreateClient(string node)\r\n {\r\n CountdownWatcher watcher = new CountdownWatcher();\r\n return new ZooKeeper(\"127.0.0.1:2181\" + node, new TimeSpan(0, 0, 0, 10000), watcher);\r\n }\r\n\r\n protected ZooKeeper CreateClient(IWatcher watcher)\r\n {\r\n return new ZooKeeper(\"127.0.0.1:2181\", new TimeSpan(0, 0, 0, 10000), watcher);\r\n }\r\n\r\n protected ZooKeeper CreateClientWithAddress(string address)\r\n {\r\n CountdownWatcher watcher = new CountdownWatcher();\r\n return new ZooKeeper(address, new TimeSpan(0, 0, 0, 10000), watcher);\r\n }\r\n\r\n public class CountdownWatcher : IWatcher\r\n {\r\n readonly ManualResetEvent resetEvent = new ManualResetEvent(false);\r\n private static readonly object sync = new object();\r\n\r\n volatile bool connected;\r\n\r\n public CountdownWatcher()\r\n {\r\n Reset();\r\n }\r\n\r\n [MethodImpl(MethodImplOptions.Synchronized)]\r\n public void Reset()\r\n {\r\n 
resetEvent.Set();\r\n connected = false;\r\n }\r\n\r\n [MethodImpl(MethodImplOptions.Synchronized)]\r\n public virtual void Process(WatchedEvent @event)\r\n {\r\n if (@event.State == KeeperState.SyncConnected)\r\n {\r\n connected = true;\r\n lock (sync)\r\n {\r\n Monitor.PulseAll(sync);\r\n }\r\n resetEvent.Set();\r\n }\r\n else\r\n {\r\n connected = false;\r\n lock (sync)\r\n {\r\n Monitor.PulseAll(sync);\r\n }\r\n }\r\n }\r\n\r\n [MethodImpl(MethodImplOptions.Synchronized)]\r\n bool IsConnected()\r\n {\r\n return connected;\r\n }\r\n\r\n [MethodImpl(MethodImplOptions.Synchronized)]\r\n void waitForConnected(TimeSpan timeout)\r\n {\r\n DateTime expire = DateTime.UtcNow + timeout;\r\n TimeSpan left = timeout;\r\n while (!connected && left.TotalMilliseconds > 0)\r\n {\r\n lock (sync)\r\n {\r\n Monitor.TryEnter(sync, left);\r\n }\r\n left = expire - DateTime.UtcNow;\r\n }\r\n if (!connected)\r\n {\r\n throw new TimeoutException(\"Did not connect\");\r\n\r\n }\r\n }\r\n\r\n void waitForDisconnected(TimeSpan timeout)\r\n {\r\n DateTime expire = DateTime.UtcNow + timeout;\r\n TimeSpan left = timeout;\r\n while (connected && left.TotalMilliseconds > 0)\r\n {\r\n lock (sync)\r\n {\r\n Monitor.TryEnter(sync, left);\r\n }\r\n left = expire - DateTime.UtcNow;\r\n }\r\n if (connected)\r\n {\r\n throw new TimeoutException(\"Did not disconnect\");\r\n }\r\n }\r\n }\r\n\r\n }\r\n}\r\n", "src/dotnet/ZooKeeperNet.Tests/Properties/AssemblyInfo.cs": "/*\r\n * Licensed to the Apache Software Foundation (ASF) under one or more\r\n * contributor license agreements. See the NOTICE file distributed with\r\n * this work for additional information regarding copyright ownership.\r\n * The ASF licenses this file to You under the Apache License, Version 2.0\r\n * (the \"License\"); you may not use this file except in compliance with\r\n * the License. 
You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n *\r\n */\r\n\ufeffusing System.Reflection;\r\nusing System.Runtime.CompilerServices;\r\nusing System.Runtime.InteropServices;\r\n\r\n// General Information about an assembly is controlled through the following \r\n// set of attributes. Change these attribute values to modify the information\r\n// associated with an assembly.\r\n[assembly: AssemblyTitle(\"SharpKeeper.Tests\")]\r\n[assembly: AssemblyDescription(\"\")]\r\n[assembly: AssemblyConfiguration(\"\")]\r\n[assembly: AssemblyCompany(\"Microsoft\")]\r\n[assembly: AssemblyProduct(\"SharpKeeper.Tests\")]\r\n[assembly: AssemblyCopyright(\"Copyright \u00a9 Microsoft 2010\")]\r\n[assembly: AssemblyTrademark(\"\")]\r\n[assembly: AssemblyCulture(\"\")]\r\n\r\n// Setting ComVisible to false makes the types in this assembly not visible \r\n// to COM components. 
If you need to access a type in this assembly from \r\n// COM, set the ComVisible attribute to true on that type.\r\n[assembly: ComVisible(false)]\r\n\r\n// The following GUID is for the ID of the typelib if this project is exposed to COM\r\n[assembly: Guid(\"0755fc77-447a-4817-b43f-82ba61d1c2a6\")]\r\n\r\n// Version information for an assembly consists of the following four values:\r\n//\r\n// Major Version\r\n// Minor Version \r\n// Build Number\r\n// Revision\r\n//\r\n// You can specify all the values or you can default the Build and Revision Numbers \r\n// by using the '*' as shown below:\r\n// [assembly: AssemblyVersion(\"1.0.*\")]\r\n[assembly: AssemblyVersion(\"1.0.0.0\")]\r\n[assembly: AssemblyFileVersion(\"1.0.0.0\")]\r\n", "src/dotnet/ZooKeeperNet.Tests/RecoveryTest.cs": "/**\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\r\nusing System;\r\nusing System.Threading;\r\nusing log4net;\r\nusing NUnit.Framework;\r\nusing Org.Apache.Zookeeper.Data;\r\n\r\nnamespace ZooKeeperNet.Tests\r\n{\r\n public class RecoveryTest : AbstractZooKeeperTests, IWatcher\r\n {\r\n private readonly ILog LOG = LogManager.GetLogger(typeof(RecoveryTest));\r\n private int setDataCount;\r\n private int processCount;\r\n private readonly string testPath = \"/unittests/recoverytests/\" + Guid.NewGuid();\r\n\r\n [Test, Explicit]\r\n public void ReconnectsWhenDisconnected()\r\n {\r\n using (CancellationTokenSource token = new CancellationTokenSource())\r\n {\r\n Thread thread = new Thread(Run)\r\n {\r\n IsBackground = true,\r\n Name = \"RecoveryTest.Run\"\r\n };\r\n thread.Start(token.Token);\r\n Thread.Sleep(15*1000);\r\n LOG.Error(\"STOP ZK!!!! Count: \" + processCount);\r\n Thread.Sleep(20*1000);\r\n LOG.Error(\"START ZK!!! 
Count: \" + processCount);\r\n Thread.Sleep(30*1000);\r\n LOG.Error(\"Stopping ZK client.\");\r\n token.Cancel();\r\n LOG.Error(\"Waiting for thread to stop...\" + processCount);\r\n thread.Join();\r\n if (thread.IsAlive)\r\n Assert.Fail(\"Thread still alive\");\r\n Assert.AreEqual(setDataCount, processCount, \"setDataCount == processCount\");\r\n LOG.Error(\"Finished:\" + setDataCount + \":\" + processCount);\r\n }\r\n }\r\n\r\n private void Run(object sender)\r\n {\r\n try\r\n {\r\n CancellationToken token = (CancellationToken)sender;\r\n using (ZooKeeper zooKeeper = CreateClient(this))\r\n {\r\n Stat stat = new Stat();\r\n if (zooKeeper.Exists(\"/unittests/recoverytests\", false) == null)\r\n {\r\n zooKeeper.Create(\"/unittests\", new byte[] {0}, Ids.OPEN_ACL_UNSAFE, CreateMode.Persistent);\r\n zooKeeper.Create(\"/unittests/recoverytests\", new byte[] {0}, Ids.OPEN_ACL_UNSAFE, CreateMode.Persistent);\r\n }\r\n if (zooKeeper.Exists(testPath, false) == null)\r\n { \r\n zooKeeper.Create(testPath, new byte[] {0}, Ids.OPEN_ACL_UNSAFE, CreateMode.Persistent);\r\n }\r\n while (zooKeeper.State.IsAlive() && !token.IsCancellationRequested)\r\n {\r\n try\r\n {\r\n zooKeeper.GetData(testPath, true, stat);\r\n zooKeeper.SetData(testPath, Guid.NewGuid().ToByteArray(), -1);\r\n setDataCount++;\r\n }\r\n catch(KeeperException ke)\r\n {\r\n LOG.Error(ke);\r\n }\r\n }\r\n LOG.Error(\"Waiting for dispose.\");\r\n }\r\n }\r\n catch(Exception ex)\r\n {\r\n LOG.Error(ex);\r\n }\r\n }\r\n\r\n public void Process(WatchedEvent @event)\r\n {\r\n LOG.Debug(@event);\r\n if (@event.Type == EventType.NodeCreated || @event.Type == EventType.NodeDataChanged)\r\n {\r\n Interlocked.Increment(ref processCount);\r\n }\r\n }\r\n }\r\n}", "src/dotnet/ZooKeeperNet/Properties/AssemblyInfo.cs": "/*\r\n * Licensed to the Apache Software Foundation (ASF) under one or more\r\n * contributor license agreements. 
See the NOTICE file distributed with\r\n * this work for additional information regarding copyright ownership.\r\n * The ASF licenses this file to You under the Apache License, Version 2.0\r\n * (the \"License\"); you may not use this file except in compliance with\r\n * the License. You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n *\r\n */\r\nusing System.Reflection;\r\nusing System.Runtime.InteropServices;\r\n\r\n// General Information about an assembly is controlled through the following \r\n// set of attributes. Change these attribute values to modify the information\r\n// associated with an assembly.\r\n[assembly: AssemblyTitle(\"ZooKeeperNet\")]\r\n[assembly: AssemblyConfiguration(\"\")]\r\n[assembly: AssemblyCulture(\"\")]\r\n\r\n// The following GUID is for the ID of the typelib if this project is exposed to COM\r\n[assembly: Guid(\"0ee34fd6-195e-4912-906a-44062ecaffad\")]"}, "files_after": {"src/dotnet/ZooKeeperNet.Tests/AbstractZooKeeperTests.cs": "/*\r\n * Licensed to the Apache Software Foundation (ASF) under one or more\r\n * contributor license agreements. See the NOTICE file distributed with\r\n * this work for additional information regarding copyright ownership.\r\n * The ASF licenses this file to You under the Apache License, Version 2.0\r\n * (the \"License\"); you may not use this file except in compliance with\r\n * the License. 
You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n *\r\n */\r\n\r\nusing System.IO;\r\nusing System.Xml;\r\n\r\nnamespace ZooKeeperNet.Tests\r\n{\r\n using System;\r\n using System.Runtime.CompilerServices;\r\n using System.Threading;\r\n\r\n public abstract class AbstractZooKeeperTests\r\n {\r\n static AbstractZooKeeperTests()\r\n {\r\n XmlDocument log4netConfig = new XmlDocument();\r\n log4netConfig.Load(File.OpenRead(\"log4net.xml\"));\r\n var repository = log4net.LogManager.CreateRepository(\"tests\");\r\n log4net.Config.XmlConfigurator.Configure(repository, log4netConfig[\"log4net\"]);\r\n }\r\n\r\n protected static readonly TimeSpan CONNECTION_TIMEOUT = new TimeSpan(0, 0, 0, 0, 10000);\r\n\r\n protected virtual ZooKeeper CreateClient()\r\n {\r\n CountdownWatcher watcher = new CountdownWatcher();\r\n return new ZooKeeper(\"127.0.0.1:2181\", new TimeSpan(0, 0, 0, 10000), watcher);\r\n }\r\n\r\n protected virtual ZooKeeper CreateClient(string node)\r\n {\r\n CountdownWatcher watcher = new CountdownWatcher();\r\n return new ZooKeeper(\"127.0.0.1:2181\" + node, new TimeSpan(0, 0, 0, 10000), watcher);\r\n }\r\n\r\n protected ZooKeeper CreateClient(IWatcher watcher)\r\n {\r\n return new ZooKeeper(\"127.0.0.1:2181\", new TimeSpan(0, 0, 0, 10000), watcher);\r\n }\r\n\r\n protected ZooKeeper CreateClientWithAddress(string address)\r\n {\r\n CountdownWatcher watcher = new CountdownWatcher();\r\n return new ZooKeeper(address, new TimeSpan(0, 0, 0, 10000), watcher);\r\n }\r\n\r\n public class CountdownWatcher : IWatcher\r\n {\r\n readonly ManualResetEvent resetEvent = new 
ManualResetEvent(false);\r\n private static readonly object sync = new object();\r\n\r\n volatile bool connected;\r\n\r\n public CountdownWatcher()\r\n {\r\n Reset();\r\n }\r\n\r\n [MethodImpl(MethodImplOptions.Synchronized)]\r\n public void Reset()\r\n {\r\n resetEvent.Set();\r\n connected = false;\r\n }\r\n\r\n [MethodImpl(MethodImplOptions.Synchronized)]\r\n public virtual void Process(WatchedEvent @event)\r\n {\r\n if (@event.State == KeeperState.SyncConnected)\r\n {\r\n connected = true;\r\n lock (sync)\r\n {\r\n Monitor.PulseAll(sync);\r\n }\r\n resetEvent.Set();\r\n }\r\n else\r\n {\r\n connected = false;\r\n lock (sync)\r\n {\r\n Monitor.PulseAll(sync);\r\n }\r\n }\r\n }\r\n\r\n [MethodImpl(MethodImplOptions.Synchronized)]\r\n bool IsConnected()\r\n {\r\n return connected;\r\n }\r\n\r\n [MethodImpl(MethodImplOptions.Synchronized)]\r\n void waitForConnected(TimeSpan timeout)\r\n {\r\n DateTime expire = DateTime.UtcNow + timeout;\r\n TimeSpan left = timeout;\r\n while (!connected && left.TotalMilliseconds > 0)\r\n {\r\n lock (sync)\r\n {\r\n Monitor.TryEnter(sync, left);\r\n }\r\n left = expire - DateTime.UtcNow;\r\n }\r\n if (!connected)\r\n {\r\n throw new TimeoutException(\"Did not connect\");\r\n\r\n }\r\n }\r\n\r\n void waitForDisconnected(TimeSpan timeout)\r\n {\r\n DateTime expire = DateTime.UtcNow + timeout;\r\n TimeSpan left = timeout;\r\n while (connected && left.TotalMilliseconds > 0)\r\n {\r\n lock (sync)\r\n {\r\n Monitor.TryEnter(sync, left);\r\n }\r\n left = expire - DateTime.UtcNow;\r\n }\r\n if (connected)\r\n {\r\n throw new TimeoutException(\"Did not disconnect\");\r\n }\r\n }\r\n }\r\n\r\n }\r\n}\r\n", "src/dotnet/ZooKeeperNet.Tests/RecoveryTest.cs": "/**\r\n * Licensed to the Apache Software Foundation (ASF) under one\r\n * or more contributor license agreements. See the NOTICE file\r\n * distributed with this work for additional information\r\n * regarding copyright ownership. 
The ASF licenses this file\r\n * to you under the Apache License, Version 2.0 (the\r\n * \"License\"); you may not use this file except in compliance\r\n * with the License. You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\nusing System;\r\nusing System.Threading;\r\nusing log4net;\r\nusing NUnit.Framework;\r\nusing Org.Apache.Zookeeper.Data;\r\n\r\nnamespace ZooKeeperNet.Tests\r\n{\r\n public class RecoveryTest : AbstractZooKeeperTests, IWatcher\r\n {\r\n private readonly ILog LOG = LogManager.GetLogger(typeof(RecoveryTest));\r\n private int setDataCount;\r\n private int processCount;\r\n private readonly string testPath = \"/unittests/recoverytests/\" + Guid.NewGuid();\r\n\r\n [Test, Explicit]\r\n public void ReconnectsWhenDisconnected()\r\n {\r\n using (\r\n CancellationTokenSource token = new CancellationTokenSource())\r\n {\r\n Thread thread = new Thread(Run)\r\n {\r\n IsBackground = true,\r\n Name = \"RecoveryTest.Run\"\r\n };\r\n thread.Start(token.Token);\r\n Thread.Sleep(15*1000);\r\n LOG.Error(\"STOP ZK!!!! Count: \" + processCount);\r\n Thread.Sleep(20*1000);\r\n LOG.Error(\"START ZK!!! 
Count: \" + processCount);\r\n Thread.Sleep(30*1000);\r\n LOG.Error(\"Stopping ZK client.\");\r\n token.Cancel();\r\n LOG.Error(\"Waiting for thread to stop...\" + processCount);\r\n thread.Join();\r\n if (thread.IsAlive)\r\n Assert.Fail(\"Thread still alive\");\r\n Assert.AreEqual(setDataCount, processCount, \"setDataCount == processCount\");\r\n LOG.Error(\"Finished:\" + setDataCount + \":\" + processCount);\r\n }\r\n }\r\n\r\n private void Run(object sender)\r\n {\r\n try\r\n {\r\n CancellationToken token = (CancellationToken)sender;\r\n using (ZooKeeper zooKeeper = CreateClient(this))\r\n {\r\n Stat stat = new Stat();\r\n if (zooKeeper.Exists(\"/unittests/recoverytests\", false) == null)\r\n {\r\n zooKeeper.Create(\"/unittests\", new byte[] {0}, Ids.OPEN_ACL_UNSAFE, CreateMode.Persistent);\r\n zooKeeper.Create(\"/unittests/recoverytests\", new byte[] {0}, Ids.OPEN_ACL_UNSAFE, CreateMode.Persistent);\r\n }\r\n if (zooKeeper.Exists(testPath, false) == null)\r\n { \r\n zooKeeper.Create(testPath, new byte[] {0}, Ids.OPEN_ACL_UNSAFE, CreateMode.Persistent);\r\n }\r\n while (zooKeeper.State.IsAlive() && !token.IsCancellationRequested)\r\n {\r\n try\r\n {\r\n zooKeeper.GetData(testPath, true, stat);\r\n zooKeeper.SetData(testPath, Guid.NewGuid().ToByteArray(), -1);\r\n setDataCount++;\r\n }\r\n catch(KeeperException ke)\r\n {\r\n LOG.Error(ke);\r\n }\r\n }\r\n LOG.Error(\"Waiting for dispose.\");\r\n }\r\n }\r\n catch(Exception ex)\r\n {\r\n LOG.Error(ex);\r\n }\r\n }\r\n\r\n public void Process(WatchedEvent @event)\r\n {\r\n LOG.Debug(@event);\r\n if (@event.Type == EventType.NodeCreated || @event.Type == EventType.NodeDataChanged)\r\n {\r\n Interlocked.Increment(ref processCount);\r\n }\r\n }\r\n }\r\n}"}}
-{"repo": "jedie/django-tools", "pr_number": 99, "title": "Dev", "state": "closed", "merged_at": "2024-08-25T12:57:27Z", "additions": 1243, "deletions": 561, "files_changed": ["django_tools/__init__.py", "django_tools_project/tests/__init__.py", "manage.py"], "files_before": {"django_tools/__init__.py": "\"\"\"\n \"django-tools\n miscellaneous tools for Django based projects\n\"\"\"\n\n__version__ = '0.56.0'\n__author__ = 'Jens Diemer '\n", "django_tools_project/tests/__init__.py": "import os\nimport unittest.util\nfrom pathlib import Path\n\nfrom bx_py_utils.test_utils.deny_requests import deny_any_real_request\n\n\ndef pre_configure_tests() -> None:\n print(f'Configure unittests via \"load_tests Protocol\" from {Path(__file__).relative_to(Path.cwd())}')\n\n # Hacky way to display more \"assert\"-Context in failing tests:\n _MIN_MAX_DIFF = unittest.util._MAX_LENGTH - unittest.util._MIN_DIFF_LEN\n unittest.util._MAX_LENGTH = int(os.environ.get('UNITTEST_MAX_LENGTH', 300))\n unittest.util._MIN_DIFF_LEN = unittest.util._MAX_LENGTH - _MIN_MAX_DIFF\n\n # Deny any request via docket/urllib3 because tests they should mock all requests:\n deny_any_real_request()\n\n\ndef load_tests(loader, tests, pattern):\n \"\"\"\n Use unittest \"load_tests Protocol\" as a hook to setup test environment before running tests.\n https://docs.python.org/3/library/unittest.html#load-tests-protocol\n \"\"\"\n pre_configure_tests()\n return loader.discover(start_dir=Path(__file__).parent, pattern=pattern)\n", "manage.py": "#!/usr/bin/env python3\n\n\"\"\"\n bootstrap CLI\n ~~~~~~~~~~~~~\n\n Just call this file, and the magic happens ;)\n\"\"\"\n\nimport hashlib\nimport shlex\nimport signal\nimport subprocess\nimport sys\nimport venv\nfrom pathlib import Path\n\n\ndef print_no_pip_error():\n print('Error: Pip not available!')\n print('Hint: \"apt-get install python3-venv\"\\n')\n\n\ntry:\n from ensurepip import version\nexcept ModuleNotFoundError as err:\n print(err)\n print('-' * 100)\n 
print_no_pip_error()\n raise\nelse:\n if not version():\n print_no_pip_error()\n sys.exit(-1)\n\n\nassert sys.version_info >= (3, 10), f'Python version {sys.version_info} is too old!'\n\n\nif sys.platform == 'win32': # wtf\n # Files under Windows, e.g.: .../.venv/Scripts/python.exe\n BIN_NAME = 'Scripts'\n FILE_EXT = '.exe'\nelse:\n # Files under Linux/Mac and all other than Windows, e.g.: .../.venv/bin/python\n BIN_NAME = 'bin'\n FILE_EXT = ''\n\nBASE_PATH = Path(__file__).parent\nVENV_PATH = BASE_PATH / '.venv'\nBIN_PATH = VENV_PATH / BIN_NAME\nPYTHON_PATH = BIN_PATH / f'python{FILE_EXT}'\nPIP_PATH = BIN_PATH / f'pip{FILE_EXT}'\nPIP_SYNC_PATH = BIN_PATH / f'pip-sync{FILE_EXT}'\n\nDEP_LOCK_PATH = BASE_PATH / 'requirements.dev.txt'\nDEP_HASH_PATH = VENV_PATH / '.dep_hash'\n\n# script file defined in pyproject.toml as [console_scripts]\n# (Under Windows: \".exe\" not added!)\nPROJECT_SHELL_SCRIPT = BIN_PATH / 'django_tools_project'\n\n\ndef get_dep_hash():\n \"\"\"Get SHA512 hash from lock file content.\"\"\"\n return hashlib.sha512(DEP_LOCK_PATH.read_bytes()).hexdigest()\n\n\ndef store_dep_hash():\n \"\"\"Generate .venv/.dep_hash\"\"\"\n DEP_HASH_PATH.write_text(get_dep_hash())\n\n\ndef venv_up2date():\n \"\"\"Is existing .venv is up-to-date?\"\"\"\n if DEP_HASH_PATH.is_file():\n return DEP_HASH_PATH.read_text() == get_dep_hash()\n return False\n\n\ndef verbose_check_call(*popen_args):\n print(f'\\n+ {shlex.join(str(arg) for arg in popen_args)}\\n')\n return subprocess.check_call(popen_args)\n\n\ndef noop_sigint_handler(signal_num, frame):\n \"\"\"\n Don't exist cmd2 shell on \"Interrupt from keyboard\"\n e.g.: User stops the dev. 
server by CONTROL-C\n \"\"\"\n\n\ndef main(argv):\n assert DEP_LOCK_PATH.is_file(), f'File not found: \"{DEP_LOCK_PATH}\" !'\n\n # Create virtual env in \".venv/\":\n if not PYTHON_PATH.is_file():\n print(f'Create virtual env here: {VENV_PATH.absolute()}')\n builder = venv.EnvBuilder(symlinks=True, upgrade=True, with_pip=True)\n builder.create(env_dir=VENV_PATH)\n\n if not PROJECT_SHELL_SCRIPT.is_file() or not venv_up2date():\n # Update pip\n verbose_check_call(PYTHON_PATH, '-m', 'pip', 'install', '-U', 'pip')\n\n # Install pip-tools\n verbose_check_call(PYTHON_PATH, '-m', 'pip', 'install', '-U', 'pip-tools')\n\n # install requirements via \"pip-sync\"\n verbose_check_call(PIP_SYNC_PATH, str(DEP_LOCK_PATH))\n\n # install project\n verbose_check_call(PIP_PATH, 'install', '--no-deps', '-e', '.')\n store_dep_hash()\n\n signal.signal(signal.SIGINT, noop_sigint_handler) # ignore \"Interrupt from keyboard\" signals\n\n # Call our entry point CLI:\n try:\n verbose_check_call(PROJECT_SHELL_SCRIPT, *argv[1:])\n except subprocess.CalledProcessError as err:\n sys.exit(err.returncode)\n\n\nif __name__ == '__main__':\n main(sys.argv)\n"}, "files_after": {"django_tools/__init__.py": "\"\"\"\n \"django-tools\n miscellaneous tools for Django based projects\n\"\"\"\n\n__version__ = '0.56.1'\n__author__ = 'Jens Diemer '\n", "django_tools_project/tests/__init__.py": "import os\nimport unittest.util\nfrom pathlib import Path\n\nfrom bx_py_utils.test_utils.deny_requests import deny_any_real_request\nfrom typeguard import install_import_hook\n\n\n# Check type annotations via typeguard in all tests:\ninstall_import_hook(packages=('django_tools', 'django_tools_project'))\n\n\ndef pre_configure_tests() -> None:\n print(f'Configure unittests via \"load_tests Protocol\" from {Path(__file__).relative_to(Path.cwd())}')\n\n # Hacky way to display more \"assert\"-Context in failing tests:\n _MIN_MAX_DIFF = unittest.util._MAX_LENGTH - unittest.util._MIN_DIFF_LEN\n unittest.util._MAX_LENGTH = 
int(os.environ.get('UNITTEST_MAX_LENGTH', 600))\n unittest.util._MIN_DIFF_LEN = unittest.util._MAX_LENGTH - _MIN_MAX_DIFF\n\n # Deny any request via docket/urllib3 because tests they should mock all requests:\n deny_any_real_request()\n\n\ndef load_tests(loader, tests, pattern):\n \"\"\"\n Use unittest \"load_tests Protocol\" as a hook to setup test environment before running tests.\n https://docs.python.org/3/library/unittest.html#load-tests-protocol\n \"\"\"\n pre_configure_tests()\n return loader.discover(start_dir=Path(__file__).parent, pattern=pattern)\n", "manage.py": "#!/usr/bin/env python3\n\n\"\"\"\n bootstrap CLI\n ~~~~~~~~~~~~~\n\n Just call this file, and the magic happens ;)\n\"\"\"\n\nimport hashlib\nimport shlex\nimport signal\nimport subprocess\nimport sys\nimport venv\nfrom pathlib import Path\n\n\ndef print_no_pip_error():\n print('Error: Pip not available!')\n print('Hint: \"apt-get install python3-venv\"\\n')\n\n\ntry:\n from ensurepip import version\nexcept ModuleNotFoundError as err:\n print(err)\n print('-' * 100)\n print_no_pip_error()\n raise\nelse:\n if not version():\n print_no_pip_error()\n sys.exit(-1)\n\n\nassert sys.version_info >= (3, 10), f'Python version {sys.version_info} is too old!'\n\n\nif sys.platform == 'win32': # wtf\n # Files under Windows, e.g.: .../.venv/Scripts/python.exe\n BIN_NAME = 'Scripts'\n FILE_EXT = '.exe'\nelse:\n # Files under Linux/Mac and all other than Windows, e.g.: .../.venv/bin/python\n BIN_NAME = 'bin'\n FILE_EXT = ''\n\nBASE_PATH = Path(__file__).parent\nVENV_PATH = BASE_PATH / '.venv'\nBIN_PATH = VENV_PATH / BIN_NAME\nPYTHON_PATH = BIN_PATH / f'python3{FILE_EXT}'\nPIP_PATH = BIN_PATH / f'pip{FILE_EXT}'\nPIP_SYNC_PATH = BIN_PATH / f'pip-sync{FILE_EXT}'\n\nDEP_LOCK_PATH = BASE_PATH / 'requirements.dev.txt'\nDEP_HASH_PATH = VENV_PATH / '.dep_hash'\n\n# script file defined in pyproject.toml as [console_scripts]\n# (Under Windows: \".exe\" not added!)\nPROJECT_SHELL_SCRIPT = BIN_PATH / 
'django_tools_project'\n\n\ndef get_dep_hash():\n \"\"\"Get SHA512 hash from lock file content.\"\"\"\n return hashlib.sha512(DEP_LOCK_PATH.read_bytes()).hexdigest()\n\n\ndef store_dep_hash():\n \"\"\"Generate .venv/.dep_hash\"\"\"\n DEP_HASH_PATH.write_text(get_dep_hash())\n\n\ndef venv_up2date():\n \"\"\"Is existing .venv is up-to-date?\"\"\"\n if DEP_HASH_PATH.is_file():\n return DEP_HASH_PATH.read_text() == get_dep_hash()\n return False\n\n\ndef verbose_check_call(*popen_args):\n print(f'\\n+ {shlex.join(str(arg) for arg in popen_args)}\\n')\n return subprocess.check_call(popen_args)\n\n\ndef noop_sigint_handler(signal_num, frame):\n \"\"\"\n Don't exist cmd2 shell on \"Interrupt from keyboard\"\n e.g.: User stops the dev. server by CONTROL-C\n \"\"\"\n\n\ndef main(argv):\n assert DEP_LOCK_PATH.is_file(), f'File not found: \"{DEP_LOCK_PATH}\" !'\n\n # Create virtual env in \".venv/\":\n if not PYTHON_PATH.is_file():\n print(f'Create virtual env here: {VENV_PATH.absolute()}')\n builder = venv.EnvBuilder(symlinks=True, upgrade=True, with_pip=True)\n builder.create(env_dir=VENV_PATH)\n\n if not PROJECT_SHELL_SCRIPT.is_file() or not venv_up2date():\n # Update pip\n verbose_check_call(PYTHON_PATH, '-m', 'pip', 'install', '-U', 'pip')\n\n # Install pip-tools\n verbose_check_call(PYTHON_PATH, '-m', 'pip', 'install', '-U', 'pip-tools')\n\n # install requirements via \"pip-sync\"\n verbose_check_call(PIP_SYNC_PATH, str(DEP_LOCK_PATH))\n\n # install project\n verbose_check_call(PIP_PATH, 'install', '--no-deps', '-e', '.')\n store_dep_hash()\n\n # Activate git pre-commit hooks:\n verbose_check_call(PYTHON_PATH, '-m', 'pre_commit', 'install')\n verbose_check_call(PYTHON_PATH, '-m', 'pre_commit', 'autoupdate')\n\n signal.signal(signal.SIGINT, noop_sigint_handler) # ignore \"Interrupt from keyboard\" signals\n\n # Call our entry point CLI:\n try:\n verbose_check_call(PROJECT_SHELL_SCRIPT, *argv[1:])\n except subprocess.CalledProcessError as err:\n 
sys.exit(err.returncode)\n\n\nif __name__ == '__main__':\n main(sys.argv)\n"}}
-{"repo": "jmax/bookr", "pr_number": 2, "title": "Modificadiones solicitadas", "state": "closed", "merged_at": null, "additions": 7, "deletions": 1, "files_changed": ["public/stylesheets/customstyles.css"], "files_before": {}, "files_after": {"public/stylesheets/customstyles.css": "//add here your custom styles"}}
-{"repo": "hmason/hackMatch", "pr_number": 1, "title": "hmason", "state": "open", "merged_at": null, "additions": 111, "deletions": 89, "files_changed": ["download_stopwords.py", "hackmatch.py"], "files_before": {"hackmatch.py": "#!/usr/bin/env python\n# encoding: utf-8\n# recognize.\n\"\"\"\nhackmatch.py\n\nCreated by Hilary Mason, Chris Wiggins, and Evan Korth.\nCopyright (c) 2010 hackNY. All rights reserved.\n\"\"\"\n\nimport sys, os\nimport csv\nimport string\nfrom collections import defaultdict\nfrom optparse import OptionParser\nfrom nltk.tokenize import *\nfrom nltk.corpus import stopwords\nfrom hcluster import jaccard\n\n# startups: Name,E-mail,Company,In NYC,Funding,Site,Blog,Twitter,Num Employees,Environment,Project,Skills,Misc\n# students: Student Name,e-mail,University,Major,Degree,Graduation Date,Site,Blog,Twitter,Facebook,Project,Skills,Misc\n\nclass HackMatch(object):\n DEBUG = False\n BOW_FIELDS = ['Environment', 'Project', 'Skills', 'Misc']\n COMPLETENESS_THRESHOLD = 4 # num of words necessary to match\n \n def __init__(self, student_file, startup_file, num_matches=3, distance=jaccard):\n self.stopwords = self.get_stopwords()\n self.distance = distance\n \n student_data = self.parseCSV(student_file)\n startup_data = self.parseCSV(startup_file)\n \n doc_words = self.defineFeatures([student_data, startup_data], self.BOW_FIELDS)\n\n # matches = self.doRanking(student_data, startup_data, doc_words, self.BOW_FIELDS, base_name_field='Student Name', match_name_field='Company')\n matches = self.doRanking(startup_data, student_data, doc_words, self.BOW_FIELDS)\n\n self.printMatches(matches, num_matches)\n \n def printMatches(self, matches, num_matches):\n for n, m in matches.items():\n print n\n for item, score in sorted(m.items(), key=lambda(i,c):(-c, i))[:num_matches]:\n print \"\\t%s :: %s\" % (item, score)\n # print \"'%s' '%s' %s\" % (n.translate(string.maketrans(\"\",\"\"), string.punctuation), item.translate(string.maketrans(\"\",\"\"), 
string.punctuation), score)\n print '\\n'\n \n \n def doRanking(self, base_data, match_data, doc_words, fields=[], base_name_field='Company', match_name_field='Student Name'):\n \"\"\"\n do ranking\n \"\"\"\n base = {}\n for item in base_data:\n base[item[base_name_field]] = self.extractFeatures(item, doc_words, fields)\n \n matches = defaultdict(dict)\n for match_item in match_data:\n match_features = self.extractFeatures(match_item, doc_words, fields)\n\n for base_item, base_item_features in base.items(): # actually do the comparison\n if not base_item_features or not match_features:\n matches[match_item[match_name_field]][base_item] = 0.0\n else:\n matches[match_item[match_name_field]][base_item] = self.distance(base_item_features, match_features)\n if self.DEBUG:\n print \"%s :: %s = %s \" % (match_item[match_name_field], base_item, self.distance(base_item_features, match_features))\n\n return matches\n\n def extractFeatures(self, item, doc_words, fields=[]):\n s_tokens = []\n for f in fields:\n tokens = None\n try:\n tokens = word_tokenize(item[f])\n except (KeyError, TypeError):\n pass\n \n if tokens:\n s_tokens.extend(tokens)\n \n s_features = [] \n for token in doc_words:\n if token in s_tokens:\n s_features.append(1)\n else:\n s_features.append(0)\n \n if sum(s_features) <= self.COMPLETENESS_THRESHOLD:\n return None\n \n return s_features\n\n def defineFeatures(self, data, fields=[]):\n \"\"\"\n define the global bag of words features\n \"\"\"\n ngram_freq = {}\n \n for d in data:\n for r in d:\n for f in fields:\n tokens = None\n try:\n tokens = word_tokenize(r[f])\n except (KeyError, TypeError):\n pass\n \n if tokens:\n for t in [t.lower() for t in tokens if t.lower() not in self.stopwords]:\n t = t.strip('.')\n ngram_freq[t] = ngram_freq.get(t, 0) + 1\n \n ngram_freq = dict([(w,c) for w,c in ngram_freq.items() if c > 1])\n if self.DEBUG:\n print \"Global vocabulary: %s\" % len(ngram_freq) \n return ngram_freq\n \n def get_stopwords(self):\n sw = 
stopwords.words('english')\n sw.extend([',', '\\xe2', '.', ')', '(', ':', \"'s\", \"'nt\", '\\x99', '\\x86', '\\xae', '\\x92'])\n return sw\n \n def parseCSV(self, filename):\n \"\"\"\n parseCSV: parses the CSV file to a dict\n \"\"\"\n csv_reader = csv.DictReader(open(filename))\n return [r for r in csv_reader]\n \n \nif __name__ == '__main__':\n parser = OptionParser()\n parser.add_option(\"-n\",\"--number\", action=\"store\", type=\"int\", dest=\"num_matches\",default=10,help=\"number of results to return\")\n parser.add_option(\"-s\",\"--student\", action=\"store\", type=\"string\", dest=\"student_file\",default=\"unmatched_students.csv\",help=\"csv of student data\")\n parser.add_option(\"-t\",\"--startup\", action=\"store\", type=\"string\", dest=\"startup_file\",default=\"unmatched_top_startups.csv\",help=\"csv of startup data\")\n (options, args) = parser.parse_args()\n \n h = HackMatch(num_matches=options.num_matches, student_file=options.student_file, startup_file=options.startup_file)"}, "files_after": {"download_stopwords.py": "from nltk import download\ndownload('stopwords')\n", "hackmatch.py": "#!/usr/bin/env python\n# encoding: utf-8\n# recognize.\n\"\"\"\nhackmatch.py\n\nCreated by Hilary Mason, Chris Wiggins, and Evan Korth.\nCopyright (c) 2010 hackNY. 
All rights reserved.\n\"\"\"\n# pylint: disable=W0614\n# pylint: disable=C0301\n\nfrom collections import defaultdict\nfrom optparse import OptionParser\nfrom nltk.corpus import stopwords\nfrom nltk.tokenize import word_tokenize\nfrom hcluster import jaccard\nfrom operator import itemgetter\nfrom csv import DictReader\n\n# startups: Name,E-mail,Company,In NYC,Funding,Site,Blog,Twitter,Num Employees,Environment,Project,Skills,Misc\n# students: Student Name,e-mail,University,Major,Degree,Graduation Date,Site,Blog,Twitter,Facebook,Project,Skills,Misc\n\n# Hack\n# I'd like to write this:\n# return reduce(list.extend, list_of_lists)\n# but it generates an error I don't get\ndef list_reducer(list_iter):\n result = []\n for l in list_iter:\n result.extend(l)\n return result\n\ndef get_stopwords():\n \"\"\"\n get_stopwords: generate a list of stop words\n \"\"\"\n return stopwords.words('english') + [',', '\\xe2', '.', ')', '(', ':', \"'s\", \"'nt\", '\\x99', '\\x86', '\\xae', '\\x92']\n\ndef parse_csv(filename):\n \"\"\"\n parse_csv: parses the CSV file to a dict\n \"\"\"\n csv_reader = DictReader(open(filename))\n return [r for r in csv_reader]\n\ndef print_matches(matches, num_matches):\n \"\"\"\n print_matches: print the top 'num_matches' matches\n \"\"\"\n for key, value_dict in matches.items():\n print key\n all_matches = sorted(value_dict.items(), key=itemgetter(1))\n top_matches = all_matches[-num_matches:]\n for item, score in top_matches:\n print \"\\t%(item)s :: %(score)s\" % locals()\n # print \"'%s' '%s' %s\" % (n.translate(string.maketrans(\"\",\"\"), string.punctuation), item.translate(string.maketrans(\"\",\"\"), string.punctuation), score)\n print '\\n'\n\nclass HackMatch(object):\n \"\"\"\n HackMatch: class to encapsulate matching companies versus startups on selected fields\n \"\"\"\n DEBUG = False\n BOW_FIELDS = ['Environment', 'Project', 'Skills', 'Misc']\n COMPLETENESS_THRESHOLD = 4 # num of words necessary to match\n \n def __init__(self, 
student_file, startup_file, num_matches=3, distance=jaccard):\n self.stopwords = set(get_stopwords())\n self.distance = distance\n \n student_data = parse_csv(student_file)\n startup_data = parse_csv(startup_file)\n \n doc_words = self.define_features([student_data, startup_data], self.BOW_FIELDS)\n\n # matches = self.do_ranking(student_data, startup_data, doc_words, self.BOW_FIELDS, base_name_field='Student Name', match_name_field='Company')\n matches = self.do_ranking(startup_data, student_data, doc_words, self.BOW_FIELDS)\n\n print_matches(matches, num_matches)\n \n def do_ranking(self, base_data, match_data, doc_words, fields=None, base_name_field='Company', match_name_field='Student Name'):\n \"\"\"\n do ranking\n \"\"\"\n fields = fields or []\n base = dict((item[base_name_field], self.extract_features(item, doc_words, fields)) for item in base_data)\n\n matches = defaultdict(dict)\n for match_item in match_data:\n match_features = self.extract_features(match_item, doc_words, fields)\n temp_dict = matches[match_item[match_name_field]]\n for base_item, base_item_features in base.items(): # actually do the comparison\n if not base_item_features or not match_features:\n temp_dict[base_item] = 0.0\n else:\n temp_dict[base_item] = self.distance(base_item_features, match_features)\n if self.DEBUG:\n print \"%s :: %s = %s \" % (match_item[match_name_field], base_item, self.distance(base_item_features, match_features))\n return matches\n\n def extract_features(self, item_dict, doc_words, fields=None):\n \"\"\"\n extract_features: Determine whether features pass test\n \"\"\"\n fields = fields or []\n tokeniter = (item_dict[f] for f in fields if f in item_dict)\n s_tokens = list_reducer(tokeniter)\n s_features = [token in s_tokens for token in doc_words]\n return s_features if sum(s_features) > self.COMPLETENESS_THRESHOLD else None\n\n def define_features(self, data, fields=None):\n \"\"\"\n define the global bag of words features\n \"\"\"\n fields = fields or []\n 
ngram_freq = defaultdict(int)\n \n featureiter = (\n r[f]\n for d in data\n for r in d\n for f in fields\n if f in r\n )\n for field in featureiter:\n tokeniter = (word.lower() for word in word_tokenize(field))\n legaliter = (word.strip('.') for word in tokeniter if word not in self.stopwords)\n for legal_word in legaliter:\n ngram_freq[legal_word] += 1\n ngram_freq = dict((word, word_count) for word, word_count in ngram_freq.items() if word_count > 1)\n if self.DEBUG:\n print \"Global vocabulary: %s\" % len(ngram_freq) \n return ngram_freq\n\nif __name__ == '__main__':\n parser = OptionParser()\n parser.add_option(\"-n\", \"--number\", action=\"store\", type=\"int\", dest=\"num_matches\", default=10, help=\"number of results to return\")\n parser.add_option(\"-s\", \"--student\", action=\"store\", type=\"string\", dest=\"student_file\", default=\"unmatched_students.csv\", help=\"csv of student data\")\n parser.add_option(\"-t\", \"--startup\", action=\"store\", type=\"string\", dest=\"startup_file\", default=\"unmatched_top_startups.csv\", help=\"csv of startup data\")\n (options, args) = parser.parse_args()\n \n hackmatch = HackMatch(num_matches=options.num_matches, student_file=options.student_file, startup_file=options.startup_file)\n"}}
-{"repo": "babs/pyrowl", "pr_number": 7, "title": "pep8 style", "state": "closed", "merged_at": null, "additions": 30, "deletions": 26, "files_changed": ["pynma/__init__.py", "pynma/pynma.py", "setup.py", "test.py"], "files_before": {"pynma/__init__.py": "#!/usr/bin/python\n\nfrom .pynma import PyNMA \n\n", "pynma/pynma.py": "#!/usr/bin/python\n\nfrom xml.dom.minidom import parseString\n\ntry:\n from http.client import HTTPSConnection\nexcept ImportError:\n from httplib import HTTPSConnection\n\ntry:\n from urllib.parse import urlencode\nexcept ImportError:\n from urllib import urlencode\n\n__version__ = \"0.1\"\n\nAPI_SERVER = 'www.notifymyandroid.com'\nADD_PATH = '/publicapi/notify'\n\nUSER_AGENT=\"PyNMA/v%s\"%__version__\n\ndef uniq_preserve(seq): # Dave Kirby\n # Order preserving\n seen = set()\n return [x for x in seq if x not in seen and not seen.add(x)]\n\ndef uniq(seq):\n # Not order preserving\n return list({}.fromkeys(seq).keys())\n\nclass PyNMA(object):\n \"\"\"PyNMA(apikey=[], developerkey=None)\ntakes 2 optional arguments:\n - (opt) apykey: might me a string containing 1 key or an array of keys\n - (opt) developerkey: where you can store your developer key\n\"\"\"\n\n def __init__(self, apikey=[], developerkey=None):\n self._developerkey = None\n self.developerkey(developerkey)\n if apikey:\n if type(apikey) == str:\n apikey = [apikey]\n self._apikey = uniq(apikey)\n\n def addkey(self, key):\n \"Add a key (register ?)\"\n if type(key) == str:\n if not key in self._apikey:\n self._apikey.append(key)\n elif type(key) == list:\n for k in key:\n if not k in self._apikey:\n self._apikey.append(k)\n\n def delkey(self, key):\n \"Removes a key (unregister ?)\"\n if type(key) == str:\n if key in self._apikey:\n self._apikey.remove(key)\n elif type(key) == list:\n for k in key:\n if key in self._apikey:\n self._apikey.remove(k)\n\n def developerkey(self, developerkey):\n \"Sets the developer key (and check it has the good length)\"\n if type(developerkey) == str 
and len(developerkey) == 48:\n self._developerkey = developerkey\n\n def push(self, application=\"\", event=\"\", description=\"\", url=\"\", contenttype=None, priority=0, batch_mode=False, html=False):\n \"\"\"Pushes a message on the registered API keys.\ntakes 5 arguments:\n - (req) application: application name [256]\n - (req) event: event name [1000]\n - (req) description: description [10000]\n - (opt) url: url [512]\n - (opt) contenttype: Content Type (act: None (plain text) or text/html)\n - (opt) priority: from -2 (lowest) to 2 (highest) (def:0)\n - (opt) batch_mode: push to all keys at once (def:False)\n\n - (opt) html: shortcut for contenttype=text/html\n\n - (opt) html: shortcut for contenttype=text/html\n\nWarning: using batch_mode will return error only if all API keys are bad\n cf: https://www.notifymyandroid.com/api.jsp\n\"\"\"\n datas = {\n 'application': application[:256].encode('utf8'),\n 'event': event[:1024].encode('utf8'),\n 'description': description[:10000].encode('utf8'),\n 'priority': priority\n }\n\n if url:\n datas['url'] = url[:512]\n\n if contenttype == \"text/html\" or html == True: # Currently only accepted content type\n datas['content-type'] = \"text/html\"\n\n if self._developerkey:\n datas['developerkey'] = self._developerkey\n\n results = {}\n\n if not batch_mode:\n for key in self._apikey:\n datas['apikey'] = key\n res = self.callapi('POST', ADD_PATH, datas)\n results[key] = res\n else:\n datas['apikey'] = \",\".join(self._apikey)\n res = self.callapi('POST', ADD_PATH, datas)\n results[datas['apikey']] = res\n return results\n \n def callapi(self, method, path, args):\n headers = { 'User-Agent': USER_AGENT }\n if method == \"POST\":\n headers['Content-type'] = \"application/x-www-form-urlencoded\"\n http_handler = HTTPSConnection(API_SERVER)\n http_handler.request(method, path, urlencode(args), headers)\n resp = http_handler.getresponse()\n\n try:\n res = self._parse_reponse(resp.read())\n except Exception as e:\n res = {'type': 
\"pynmaerror\",\n 'code': 600,\n 'message': str(e)\n }\n pass\n \n return res\n\n def _parse_reponse(self, response):\n root = parseString(response).firstChild\n for elem in root.childNodes:\n if elem.nodeType == elem.TEXT_NODE: continue\n if elem.tagName == 'success':\n res = dict(list(elem.attributes.items()))\n res['message'] = \"\"\n res['type'] = elem.tagName\n return res\n if elem.tagName == 'error':\n res = dict(list(elem.attributes.items()))\n res['message'] = elem.firstChild.nodeValue\n res['type'] = elem.tagName\n return res\n \n \n", "setup.py": "#!/usr/bin/env python\nfrom setuptools import setup, find_packages\n\nsetup(\n name='pyrowl',\n version='0.1',\n packages=find_packages()\n)\n\n", "test.py": "#!/usr/bin/python\n\nfrom pynma import PyNMA\nfrom pprint import pprint\nimport os\n\np = None\n\ndef main(keys):\n global p\n pkey = None\n \n p = PyNMA()\n if os.path.isfile(\"mydeveloperkey\"):\n dkey = open(\"mydeveloperkey\",'r').readline().strip()\n p.developerkey(dkey)\n\n p.addkey(keys)\n res = p.push(\"test app\", 'test event', 'test msg google', 'http://example.com', batch_mode=False, html=True)\n pprint(res)\n \nif __name__ == \"__main__\":\n if os.path.isfile('myapikey'):\n keys = [_f for _f in open(\"myapikey\",'r').read().split(\"\\n\") if _f]\n \n main(keys)\n else:\n print(\"need a file named myapikey containing one apikey per line\")\n"}, "files_after": {"pynma/__init__.py": "#!/usr/bin/python\n\nfrom .pynma import PyNMA\n", "pynma/pynma.py": "#!/usr/bin/python\n\nfrom xml.dom.minidom import parseString\n\ntry:\n from http.client import HTTPSConnection\nexcept ImportError:\n from httplib import HTTPSConnection\n\ntry:\n from urllib.parse import urlencode\nexcept ImportError:\n from urllib import urlencode\n\n__version__ = \"0.1\"\n\nAPI_SERVER = 'www.notifymyandroid.com'\nADD_PATH = '/publicapi/notify'\n\nUSER_AGENT = \"PyNMA/v%s\" % __version__\n\n\ndef uniq_preserve(seq): # Dave Kirby\n # Order preserving\n seen = set()\n return [x for x 
in seq if x not in seen and not seen.add(x)]\n\n\ndef uniq(seq):\n # Not order preserving\n return list({}.fromkeys(seq).keys())\n\n\nclass PyNMA(object):\n \"\"\"PyNMA(apikey=[], developerkey=None)\ntakes 2 optional arguments:\n - (opt) apykey: might me a string containing 1 key or an array of keys\n - (opt) developerkey: where you can store your developer key\n\"\"\"\n\n def __init__(self, apikey=[], developerkey=None):\n self._developerkey = None\n self.developerkey(developerkey)\n if apikey:\n if type(apikey) == str:\n apikey = [apikey]\n self._apikey = uniq(apikey)\n\n def addkey(self, key):\n \"Add a key (register ?)\"\n if type(key) == str:\n if key not in self._apikey:\n self._apikey.append(key)\n elif type(key) == list:\n for k in key:\n if k not in self._apikey:\n self._apikey.append(k)\n\n def delkey(self, key):\n \"Removes a key (unregister ?)\"\n if type(key) == str:\n if key in self._apikey:\n self._apikey.remove(key)\n elif type(key) == list:\n for k in key:\n if key in self._apikey:\n self._apikey.remove(k)\n\n def developerkey(self, developerkey):\n \"Sets the developer key (and check it has the good length)\"\n if type(developerkey) == str and len(developerkey) == 48:\n self._developerkey = developerkey\n\n def push(self, application=\"\", event=\"\", description=\"\", url=\"\",\n contenttype=None, priority=0, batch_mode=False, html=False):\n \"\"\"Pushes a message on the registered API keys.\ntakes 5 arguments:\n - (req) application: application name [256]\n - (req) event: event name [1000]\n - (req) description: description [10000]\n - (opt) url: url [512]\n - (opt) contenttype: Content Type (act: None (plain text) or text/html)\n - (opt) priority: from -2 (lowest) to 2 (highest) (def:0)\n - (opt) batch_mode: push to all keys at once (def:False)\n\n - (opt) html: shortcut for contenttype=text/html\n\n - (opt) html: shortcut for contenttype=text/html\n\nWarning: using batch_mode will return error only if all API keys are bad\n cf: 
https://www.notifymyandroid.com/api.jsp\n\"\"\"\n datas = {\n 'application': application[:256].encode('utf8'),\n 'event': event[:1024].encode('utf8'),\n 'description': description[:10000].encode('utf8'),\n 'priority': priority\n }\n\n if url:\n datas['url'] = url[:512]\n\n # Currently only accepted content type\n if contenttype == \"text/html\" or html is True:\n datas['content-type'] = \"text/html\"\n\n if self._developerkey:\n datas['developerkey'] = self._developerkey\n\n results = {}\n\n if not batch_mode:\n for key in self._apikey:\n datas['apikey'] = key\n res = self.callapi('POST', ADD_PATH, datas)\n results[key] = res\n else:\n datas['apikey'] = \",\".join(self._apikey)\n res = self.callapi('POST', ADD_PATH, datas)\n results[datas['apikey']] = res\n return results\n\n def callapi(self, method, path, args):\n headers = {'User-Agent': USER_AGENT}\n if method == \"POST\":\n headers['Content-type'] = \"application/x-www-form-urlencoded\"\n http_handler = HTTPSConnection(API_SERVER)\n http_handler.request(method, path, urlencode(args), headers)\n resp = http_handler.getresponse()\n\n try:\n res = self._parse_reponse(resp.read())\n except Exception as e:\n res = {'type': \"pynmaerror\",\n 'code': 600,\n 'message': str(e)\n }\n pass\n\n return res\n\n def _parse_reponse(self, response):\n root = parseString(response).firstChild\n for elem in root.childNodes:\n if elem.nodeType == elem.TEXT_NODE:\n continue\n if elem.tagName == 'success':\n res = dict(list(elem.attributes.items()))\n res['message'] = \"\"\n res['type'] = elem.tagName\n return res\n if elem.tagName == 'error':\n res = dict(list(elem.attributes.items()))\n res['message'] = elem.firstChild.nodeValue\n res['type'] = elem.tagName\n return res\n", "setup.py": "#!/usr/bin/env python\nfrom setuptools import setup, find_packages\n\nsetup(\n name='pyrowl',\n version='0.1',\n packages=find_packages()\n)\n", "test.py": "#!/usr/bin/python\n\nfrom pynma import PyNMA\nfrom pprint import pprint\nimport os\n\np = 
None\n\n\ndef main(keys):\n global p\n\n p = PyNMA()\n if os.path.isfile(\"mydeveloperkey\"):\n dkey = open(\"mydeveloperkey\", 'r').readline().strip()\n p.developerkey(dkey)\n\n p.addkey(keys)\n res = p.push(\"test app\", 'test event',\n 'test msg google',\n 'http://example.com', batch_mode=False, html=True)\n pprint(res)\n\nif __name__ == \"__main__\":\n if os.path.isfile('myapikey'):\n keys = [_f for _f in open(\"myapikey\", 'r').read().split(\"\\n\") if _f]\n\n main(keys)\n else:\n print(\"need a file named myapikey containing one apikey per line\")\n"}}
-{"repo": "marcuswestin/fin", "pr_number": 1, "title": "Hi! I fixed some code for you!", "state": "closed", "merged_at": "2012-07-09T02:14:48Z", "additions": 1, "deletions": 1, "files_changed": ["engines/development/storage.js"], "files_before": {"engines/development/storage.js": "var fs = require('fs'),\n\tpath = require('path')\n\nvar data = {},\n\tdataDumpFile = './_development-engine-dump.json'\n\nif (path.existsSync(dataDumpFile)) {\n\tconsole.log('node engine found ' + dataDumpFile + ' - loading data...')\n\tdata = JSON.parse(fs.readFileSync(dataDumpFile))\n\tconsole.log('done loading data')\n}\n\nprocess.on('exit', function() {\n\tconsole.log('node storage engine detected shutdown')\n\tdumpData()\n})\n\nfunction dumpData() {\n\tconsole.log('dumping data...')\n\tfs.writeFileSync(dataDumpFile, JSON.stringify(data))\n\tconsole.log('done dumping data.')\n}\n\nsetInterval(dumpData, 60000)\n\nfunction typeError(operation, type, key) {\n\treturn '\"'+operation+'\" expected a '+type+' at key \"'+key+'\" but found a '+typeof data[key]\n}\n\nvar storeAPI = module.exports = {\n\t/* Getters\n\t *********/\n\tgetBytes: function(key, callback) {\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tcallback && callback(null, null)\n\t\t} else if (typeof data[key] == 'string' || typeof data[key] == 'number' || typeof data[key] == 'boolean') {\n\t\t\tcallback && callback(null, data[key])\n\t\t} else {\n\t\t\tcallback && callback(typeError('getBytes', 'string or number', key))\n\t\t}\n\t},\n\t\n\tgetListItems: function(key, from, to, callback) {\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tcallback && callback(null, [])\n\t\t} else if (!(data[key] instanceof Array)) {\n\t\t\tcallback && callback(typeError('getListItems', 'list', key))\n\t\t} else {\n\t\t\tif (to < 0) { to = data[key].length + to + 1 }\n\t\t\tfrom = Math.max(from, 0)\n\t\t\tto = Math.min(to, data[key].length)\n\t\t\tcallback && callback(null, data[key].slice(from, to - 
from))\n\t\t}\n\t},\n\t\n\tgetMembers: function(key, callback) {\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tcallback && callback(null, [])\n\t\t} else if (typeof data[key] != 'object') {\n\t\t\tcallback && callback(typeError('getMembers', 'set', key))\n\t\t} else {\n\t\t\tvar response = []\n\t\t\tfor (var value in data[key]) { response.push(JSON.parse(value)) }\n\t\t\tcallback && callback(null, response)\n\t\t}\n\t},\n\t\n\t/* Mutation handlers\n\t *******************/\n\thandleMutation: function(operation, key, args, callback) {\n\t\tvar operationArgs = [key].concat(args)\n\t\tif (callback) { operationArgs.push(callback) }\n\t\tstoreAPI[operation].apply(this, operationArgs)\n\t},\n\t\n\ttransact: function(transactionFn) {\n\t\t// the development engine acts atomically. We assume node won't halt during an operation\n\t\ttransactionFn()\n\t},\n\t\n\tset: function(key, value, callback) {\n\t\tif (typeof data[key] == 'undefined' || typeof data[key] == 'string' || typeof data[key] == 'number' || typeof data[key] == 'boolean') {\n\t\t\tdata[key] = value\n\t\t\tcallback && callback(null, data[key])\n\t\t} else {\n\t\t\tcallback && callback(typeError('set', 'string or number', key), null)\n\t\t}\n\t},\n\t\n\tpush: function(key, values, callback) {\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tdata[key] = [].concat(values)\n\t\t\tcallback && callback(null, null)\n\t\t} else if (data[key] instanceof Array) {\n\t\t\tdata[key] = data[key].concat(values)\n\t\t\tcallback && callback(null, null)\n\t\t} else {\n\t\t\tcallback && callback(typeError('push', 'list', key), null)\n\t\t}\n\t},\n\t\n\tunshift: function(key, values, callback) {\n\t\tvar values = Array.prototype.slice.call(arguments, 1)\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tdata[key] = [].concat(values)\n\t\t\tcallback && callback(null, null)\n\t\t} else if (data[key] instanceof Array) {\n\t\t\tdata[key] = values.concat(data[key])\n\t\t\tcallback && callback(null, null)\n\t\t} else 
{\n\t\t\tcallback && callback(typeError('push', 'list', key), null)\n\t\t}\n\t},\n\t\n\tincrement: function(key, callback) {\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tdata[key] = 1\n\t\t\tcallback && callback(null, data[key])\n\t\t} else if (typeof data[key] == 'number') {\n\t\t\tdata[key] += 1\n\t\t\tcallback && callback(null, data[key])\n\t\t} else {\n\t\t\tcallback && callback(typeError('increment', 'number', key), null)\n\t\t}\n\t},\n\t\n\tdecrement: function(key, callback) {\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tdata[key] = -1\n\t\t\tcallback && callback(null, data[key])\n\t\t} else if (typeof data[key] == 'number') {\n\t\t\tdata[key] -= 1\n\t\t\tcallback && callback(null, data[key])\n\t\t} else {\n\t\t\tcallback && callback(typeError('decrement', 'number', key), null)\n\t\t}\n\t},\n\t\n\tadd: function(key, value, callback) {\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tdata[key] = value\n\t\t\tcallback && callback(null, data[key])\n\t\t} else if (typeof data[key] == 'number') {\n\t\t\tdata[key] += value\n\t\t\tcallback && callback(null, data[key])\n\t\t} else {\n\t\t\tcallback && callback(typeError('add', 'number', key), null)\n\t\t}\n\t},\n\t\n\tsubtract: function(key, value, callback) {\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tdata[key] = -value\n\t\t\tcallback && callback(null, data[key])\n\t\t} else if (typeof data[key] == 'number') {\n\t\t\tdata[key] -= value\n\t\t\tcallback && callback(null, data[key])\n\t\t} else {\n\t\t\tcallback && callback(typeError('subtract', 'number', key), null)\n\t\t}\n\t},\n\t\n\tsadd: function(key, value, callback) {\n\t\tvalue = JSON.stringify(value)\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tdata[key] = {}\n\t\t\tdata[key][value] = true\n\t\t\tcallback && callback(null, 1)\n\t\t} else if (typeof data[key] == 'object') {\n\t\t\tvar response = data[key][value] ? 
0 : 1\n\t\t\tdata[key][value] = true\n\t\t\tcallback && callback(null, response)\n\t\t} else {\n\t\t\tcallback && callback(typeError('sadd', 'set', key), null)\n\t\t}\n\t},\n\t\n\tsrem: function(key, value, callback) {\n\t\tvalue = JSON.stringify(value)\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tcallback && callback(null, 0)\n\t\t} else if (typeof data[key] == 'object') {\n\t\t\tvar response = data[key][value] ? 1 : 0\n\t\t\tdelete data[key][value]\n\t\t\tcallback && callback(null, response)\n\t\t} else {\n\t\t\tcallback && callback(typeError('srem', 'set', key), null)\n\t\t}\n\t}\n}\n"}, "files_after": {"engines/development/storage.js": "var fs = require('fs'),\n\tpath = require('path')\n\nvar data = {},\n\tdataDumpFile = './_development-engine-dump.json'\n\nif (fs.existsSync(dataDumpFile)) {\n\tconsole.log('node engine found ' + dataDumpFile + ' - loading data...')\n\tdata = JSON.parse(fs.readFileSync(dataDumpFile))\n\tconsole.log('done loading data')\n}\n\nprocess.on('exit', function() {\n\tconsole.log('node storage engine detected shutdown')\n\tdumpData()\n})\n\nfunction dumpData() {\n\tconsole.log('dumping data...')\n\tfs.writeFileSync(dataDumpFile, JSON.stringify(data))\n\tconsole.log('done dumping data.')\n}\n\nsetInterval(dumpData, 60000)\n\nfunction typeError(operation, type, key) {\n\treturn '\"'+operation+'\" expected a '+type+' at key \"'+key+'\" but found a '+typeof data[key]\n}\n\nvar storeAPI = module.exports = {\n\t/* Getters\n\t *********/\n\tgetBytes: function(key, callback) {\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tcallback && callback(null, null)\n\t\t} else if (typeof data[key] == 'string' || typeof data[key] == 'number' || typeof data[key] == 'boolean') {\n\t\t\tcallback && callback(null, data[key])\n\t\t} else {\n\t\t\tcallback && callback(typeError('getBytes', 'string or number', key))\n\t\t}\n\t},\n\t\n\tgetListItems: function(key, from, to, callback) {\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tcallback && 
callback(null, [])\n\t\t} else if (!(data[key] instanceof Array)) {\n\t\t\tcallback && callback(typeError('getListItems', 'list', key))\n\t\t} else {\n\t\t\tif (to < 0) { to = data[key].length + to + 1 }\n\t\t\tfrom = Math.max(from, 0)\n\t\t\tto = Math.min(to, data[key].length)\n\t\t\tcallback && callback(null, data[key].slice(from, to - from))\n\t\t}\n\t},\n\t\n\tgetMembers: function(key, callback) {\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tcallback && callback(null, [])\n\t\t} else if (typeof data[key] != 'object') {\n\t\t\tcallback && callback(typeError('getMembers', 'set', key))\n\t\t} else {\n\t\t\tvar response = []\n\t\t\tfor (var value in data[key]) { response.push(JSON.parse(value)) }\n\t\t\tcallback && callback(null, response)\n\t\t}\n\t},\n\t\n\t/* Mutation handlers\n\t *******************/\n\thandleMutation: function(operation, key, args, callback) {\n\t\tvar operationArgs = [key].concat(args)\n\t\tif (callback) { operationArgs.push(callback) }\n\t\tstoreAPI[operation].apply(this, operationArgs)\n\t},\n\t\n\ttransact: function(transactionFn) {\n\t\t// the development engine acts atomically. 
We assume node won't halt during an operation\n\t\ttransactionFn()\n\t},\n\t\n\tset: function(key, value, callback) {\n\t\tif (typeof data[key] == 'undefined' || typeof data[key] == 'string' || typeof data[key] == 'number' || typeof data[key] == 'boolean') {\n\t\t\tdata[key] = value\n\t\t\tcallback && callback(null, data[key])\n\t\t} else {\n\t\t\tcallback && callback(typeError('set', 'string or number', key), null)\n\t\t}\n\t},\n\t\n\tpush: function(key, values, callback) {\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tdata[key] = [].concat(values)\n\t\t\tcallback && callback(null, null)\n\t\t} else if (data[key] instanceof Array) {\n\t\t\tdata[key] = data[key].concat(values)\n\t\t\tcallback && callback(null, null)\n\t\t} else {\n\t\t\tcallback && callback(typeError('push', 'list', key), null)\n\t\t}\n\t},\n\t\n\tunshift: function(key, values, callback) {\n\t\tvar values = Array.prototype.slice.call(arguments, 1)\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tdata[key] = [].concat(values)\n\t\t\tcallback && callback(null, null)\n\t\t} else if (data[key] instanceof Array) {\n\t\t\tdata[key] = values.concat(data[key])\n\t\t\tcallback && callback(null, null)\n\t\t} else {\n\t\t\tcallback && callback(typeError('push', 'list', key), null)\n\t\t}\n\t},\n\t\n\tincrement: function(key, callback) {\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tdata[key] = 1\n\t\t\tcallback && callback(null, data[key])\n\t\t} else if (typeof data[key] == 'number') {\n\t\t\tdata[key] += 1\n\t\t\tcallback && callback(null, data[key])\n\t\t} else {\n\t\t\tcallback && callback(typeError('increment', 'number', key), null)\n\t\t}\n\t},\n\t\n\tdecrement: function(key, callback) {\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tdata[key] = -1\n\t\t\tcallback && callback(null, data[key])\n\t\t} else if (typeof data[key] == 'number') {\n\t\t\tdata[key] -= 1\n\t\t\tcallback && callback(null, data[key])\n\t\t} else {\n\t\t\tcallback && callback(typeError('decrement', 'number', key), 
null)\n\t\t}\n\t},\n\t\n\tadd: function(key, value, callback) {\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tdata[key] = value\n\t\t\tcallback && callback(null, data[key])\n\t\t} else if (typeof data[key] == 'number') {\n\t\t\tdata[key] += value\n\t\t\tcallback && callback(null, data[key])\n\t\t} else {\n\t\t\tcallback && callback(typeError('add', 'number', key), null)\n\t\t}\n\t},\n\t\n\tsubtract: function(key, value, callback) {\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tdata[key] = -value\n\t\t\tcallback && callback(null, data[key])\n\t\t} else if (typeof data[key] == 'number') {\n\t\t\tdata[key] -= value\n\t\t\tcallback && callback(null, data[key])\n\t\t} else {\n\t\t\tcallback && callback(typeError('subtract', 'number', key), null)\n\t\t}\n\t},\n\t\n\tsadd: function(key, value, callback) {\n\t\tvalue = JSON.stringify(value)\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tdata[key] = {}\n\t\t\tdata[key][value] = true\n\t\t\tcallback && callback(null, 1)\n\t\t} else if (typeof data[key] == 'object') {\n\t\t\tvar response = data[key][value] ? 0 : 1\n\t\t\tdata[key][value] = true\n\t\t\tcallback && callback(null, response)\n\t\t} else {\n\t\t\tcallback && callback(typeError('sadd', 'set', key), null)\n\t\t}\n\t},\n\t\n\tsrem: function(key, value, callback) {\n\t\tvalue = JSON.stringify(value)\n\t\tif (typeof data[key] == 'undefined') {\n\t\t\tcallback && callback(null, 0)\n\t\t} else if (typeof data[key] == 'object') {\n\t\t\tvar response = data[key][value] ? 1 : 0\n\t\t\tdelete data[key][value]\n\t\t\tcallback && callback(null, response)\n\t\t} else {\n\t\t\tcallback && callback(typeError('srem', 'set', key), null)\n\t\t}\n\t}\n}\n"}}
-{"repo": "tauren/jquery-ui", "pr_number": 1, "title": "Fix sequential type ahead should through the list if not match is found for two same consecutive letters", "state": "closed", "merged_at": null, "additions": 743, "deletions": 140, "files_changed": ["demos/selectmenu/overlay.html", "demos/selectmenu/refresh.html", "demos/selectmenu/typeahead.html", "tests/unit/selectmenu/normalselect.html", "tests/unit/selectmenu/selectmenu.html", "tests/unit/selectmenu/selectmenu_typeahead_sequential.js", "ui/jquery.ui.selectmenu.js"], "files_before": {"ui/jquery.ui.selectmenu.js": " /*\n * jQuery UI selectmenu dev version\n *\n * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)\n * Dual licensed under the MIT (MIT-LICENSE.txt)\n * and GPL (GPL-LICENSE.txt) licenses.\n *\n * http://docs.jquery.com/UI\n * https://github.com/fnagel/jquery-ui/wiki/Selectmenu\n */\n\n(function($) {\n\n$.widget(\"ui.selectmenu\", {\n\tgetter: \"value\",\n\tversion: \"1.8\",\n\teventPrefix: \"selectmenu\",\n\toptions: {\n\t\ttransferClasses: true,\n\t\ttypeAhead: \"sequential\",\n\t\tstyle: 'dropdown',\n\t\tpositionOptions: {\n\t\t\tmy: \"left top\",\n\t\t\tat: \"left bottom\",\n\t\t\toffset: null\n\t\t},\n\t\twidth: null,\n\t\tmenuWidth: null,\n\t\thandleWidth: 26,\n\t\tmaxHeight: null,\n\t\ticons: null,\n\t\tformat: null,\n\t\tbgImage: function() {},\n\t\twrapperElement: \"\"\n\t},\n\n\t_create: function() {\n\t\tvar self = this, o = this.options;\n\n\t\t// set a default id value, generate a new random one if not set by developer\n\t\tvar selectmenuId = this.element.attr('id') || 'ui-selectmenu-' + Math.random().toString(16).slice(2, 10);\n\n\t\t// quick array of button and menu id's\n\t\tthis.ids = [ selectmenuId + '-button', selectmenuId + '-menu' ];\n\n\t\t// define safe mouseup for future toggling\n\t\tthis._safemouseup = true;\n\n\t\t// create menu button wrapper\n\t\tthis.newelement = $('')\n\t\t\t.insertAfter(this.element);\n\t\tthis.newelement.wrap(o.wrapperElement);\n\n\t\t// 
transfer tabindex\n\t\tvar tabindex = this.element.attr('tabindex');\n\t\tif (tabindex) {\n\t\t\tthis.newelement.attr('tabindex', tabindex);\n\t\t}\n\n\t\t// save reference to select in data for ease in calling methods\n\t\tthis.newelement.data('selectelement', this.element);\n\n\t\t// menu icon\n\t\tthis.selectmenuIcon = $('')\n\t\t\t.prependTo(this.newelement);\n\n\t\t// append status span to button\n\t\tthis.newelement.prepend('');\n\n\t\t// make associated form label trigger focus\n\t\t$( 'label[for=\"' + selectmenuId + '\"]' )\n\t\t\t.attr( 'for', this.ids[0] )\n\t\t\t.bind( 'click.selectmenu', function() {\n\t\t\t\tself.newelement[0].focus();\n\t\t\t\treturn false;\n\t\t\t});\n\n\t\t// click toggle for menu visibility\n\t\tthis.newelement\n\t\t\t.bind('mousedown.selectmenu', function(event) {\n\t\t\t\tself._toggle(event, true);\n\t\t\t\t// make sure a click won't open/close instantly\n\t\t\t\tif (o.style == \"popup\") {\n\t\t\t\t\tself._safemouseup = false;\n\t\t\t\t\tsetTimeout(function() { self._safemouseup = true; }, 300);\n\t\t\t\t}\n\t\t\t\treturn false;\n\t\t\t})\n\t\t\t.bind('click.selectmenu', function() {\n\t\t\t\treturn false;\n\t\t\t})\n\t\t\t.bind(\"keydown.selectmenu\", function(event) {\n\t\t\t\tvar ret = false;\n\t\t\t\tswitch (event.keyCode) {\n\t\t\t\t\tcase $.ui.keyCode.ENTER:\n\t\t\t\t\t\tret = true;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.SPACE:\n\t\t\t\t\t\tself._toggle(event);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.UP:\n\t\t\t\t\t\tif (event.altKey) {\n\t\t\t\t\t\t\tself.open(event);\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tself._moveSelection(-1);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.DOWN:\n\t\t\t\t\t\tif (event.altKey) {\n\t\t\t\t\t\t\tself.open(event);\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tself._moveSelection(1);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.LEFT:\n\t\t\t\t\t\tself._moveSelection(-1);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 
$.ui.keyCode.RIGHT:\n\t\t\t\t\t\tself._moveSelection(1);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.TAB:\n\t\t\t\t\t\tret = true;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tdefault:\n\t\t\t\t\t\tret = true;\n\t\t\t\t}\n\t\t\t\treturn ret;\n\t\t\t})\n\t\t\t.bind('keypress.selectmenu', function(event) {\n\t\t\t\tself._typeAhead(event.which, 'mouseup');\n\t\t\t\treturn true;\n\t\t\t})\n\t\t\t.bind('mouseover.selectmenu focus.selectmenu', function() {\n\t\t\t\tif (!o.disabled) {\n\t\t\t\t\t$(this).addClass(self.widgetBaseClass + '-focus ui-state-hover');\n\t\t\t\t}\n\t\t\t})\n\t\t\t.bind('mouseout.selectmenu blur.selectmenu', function() {\n\t\t\t\tif (!o.disabled) {\n\t\t\t\t\t$(this).removeClass(self.widgetBaseClass + '-focus ui-state-hover');\n\t\t\t\t}\n\t\t\t});\n\n\t\t// document click closes menu\n\t\t$(document).bind(\"mousedown.selectmenu\", function(event) {\n\t\t\tself.close(event);\n\t\t});\n\n\t\t// change event on original selectmenu\n\t\tthis.element\n\t\t\t.bind(\"click.selectmenu\", function() {\n\t\t\t\tself._refreshValue();\n\t\t\t})\n\t\t\t// FIXME: newelement can be null under unclear circumstances in IE8\n\t\t\t// TODO not sure if this is still a problem (fnagel 20.03.11)\n\t\t\t.bind(\"focus.selectmenu\", function() {\n\t\t\t\tif (self.newelement) {\n\t\t\t\t\tself.newelement[0].focus();\n\t\t\t\t}\n\t\t\t});\n\n\t\t// set width when not set via options\n\t\tif (!o.width) {\n\t\t\to.width = this.element.outerWidth();\n\t\t}\n\t\t// set menu button width\n\t\tthis.newelement.width(o.width);\n\n\t\t// hide original selectmenu element\n\t\tthis.element.hide();\n\n\t\t// create menu portion, append to body\n\t\tthis.list = $('
').appendTo('body');\n\t\tthis.list.wrap(o.wrapperElement);\n\n\t\t// transfer menu click to menu button\n\t\tthis.list\n\t\t\t.bind(\"keydown.selectmenu\", function(event) {\n\t\t\t\tvar ret = false;\n\t\t\t\tswitch (event.keyCode) {\n\t\t\t\t\tcase $.ui.keyCode.UP:\n\t\t\t\t\t\tif (event.altKey) {\n\t\t\t\t\t\t\tself.close(event, true);\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tself._moveFocus(-1);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.DOWN:\n\t\t\t\t\t\tif (event.altKey) {\n\t\t\t\t\t\t\tself.close(event, true);\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tself._moveFocus(1);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.LEFT:\n\t\t\t\t\t\tself._moveFocus(-1);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.RIGHT:\n\t\t\t\t\t\tself._moveFocus(1);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.HOME:\n\t\t\t\t\t\tself._moveFocus(':first');\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.PAGE_UP:\n\t\t\t\t\t\tself._scrollPage('up');\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.PAGE_DOWN:\n\t\t\t\t\t\tself._scrollPage('down');\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.END:\n\t\t\t\t\t\tself._moveFocus(':last');\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.ENTER:\n\t\t\t\t\tcase $.ui.keyCode.SPACE:\n\t\t\t\t\t\tself.close(event, true);\n\t\t\t\t\t\t$(event.target).parents('li:eq(0)').trigger('mouseup');\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.TAB:\n\t\t\t\t\t\tret = true;\n\t\t\t\t\t\tself.close(event, true);\n\t\t\t\t\t\t$(event.target).parents('li:eq(0)').trigger('mouseup');\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.ESCAPE:\n\t\t\t\t\t\tself.close(event, true);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tdefault:\n\t\t\t\t\t\tret = true;\n\t\t\t\t}\n\t\t\t\treturn ret;\n\t\t\t})\n\t\t\t.bind('keypress.selectmenu', function(event) {\n\t\t\t\tself._typeAhead(event.which, 'focus');\n\t\t\t\treturn true;\n\t\t\t})\n\t\t\t// this allows for using the scrollbar in an overflowed list\n\t\t\t.bind( 'mousedown.selectmenu 
mouseup.selectmenu', function() { return false; });\n\n\n\t\t// needed when window is resized\n\t\t$(window).bind( \"resize.selectmenu\", $.proxy( self._refreshPosition, this ) );\n\t},\n\n\t_init: function() {\n\t\tvar self = this, o = this.options;\n\n\t\t// serialize selectmenu element options\n\t\tvar selectOptionData = [];\n\t\tthis.element\n\t\t\t.find('option')\n\t\t\t.each(function() {\n\t\t\t\tselectOptionData.push({\n\t\t\t\t\tvalue: $(this).attr('value'),\n\t\t\t\t\ttext: self._formatText($(this).text()),\n\t\t\t\t\tselected: $(this).attr('selected'),\n\t\t\t\t\tdisabled: $(this).attr('disabled'),\n\t\t\t\t\tclasses: $(this).attr('class'),\n\t\t\t\t\ttypeahead: $(this).attr('typeahead'),\n\t\t\t\t\tparentOptGroup: $(this).parent('optgroup'),\n\t\t\t\t\tbgImage: o.bgImage.call($(this))\n\t\t\t\t});\n\t\t\t});\n\n\t\t// active state class is only used in popup style\n\t\tvar activeClass = (self.options.style == \"popup\") ? \" ui-state-active\" : \"\";\n\n\t\t// empty list so we can refresh the selectmenu via selectmenu()\n\t\tthis.list.html(\"\");\n\n\t\t// write li's\n\t\tfor (var i = 0; i < selectOptionData.length; i++) {\n\t\t\t\tvar thisLi = $('
\n\t\n", "tests/unit/selectmenu/selectmenu.html": "\n\n\n\t\n\tjQuery UI SelectMenu Test Suite\n\n\t\n\t\n\t\n\t\n\t\n\t\n\t\n\t\n\t\n\t\n\t\n\t\n\t\n\n\t\t\n\n\n\n\n
jQuery UI SelectMenu Test Suite
\n\n\n\n\n\n\n", "tests/unit/selectmenu/selectmenu_typeahead_sequential.js": "/*\n * selectmenu_typeahead_sequential.js\n */\n(function($) {\n\n\tvar currentTestId = 0;\n\tvar currentSelectMenu;\n\tvar currentSelectMenuButton;\n\t\n\tvar createKeypressEvent = function(charCode) {\n\t\tvar e = jQuery.Event(\"keypress\");\n\t\te.which = charCode;\n\t\treturn e;\n\t};\n\t\n\tvar s = createKeypressEvent(83);\t\n\tvar j = createKeypressEvent(74);\t\n\tvar o = createKeypressEvent(79);\n\tvar enter = createKeypressEvent(13);\n\tvar down = createKeypressEvent(40);\n\t\n\tvar jOptions = [\n\t\t'James',\n\t\t'John',\n\t\t'Jzer'\t\t\n\t];\n\t\n\tvar sOptions = [\n\t\t'Sam',\n\t\t'Shirly'\n\t];\n\t\n\tvar setupSelectMenu = function(id) {\n\t\tvar fieldset = $('');\n\t\t$('')\n\t\t\t.appendTo(fieldset);\n\t\t$('')\n\t\t\t.appendTo(fieldset);\n\t\t\t\n\t\tfieldset.appendTo('body');\n\t\t$('select#cycling' + id).selectmenu();\n\t\t\n\t\treturn $('select#cycling' + id);\n\t};\n\t\n\tvar setupSelectMenuWithSOptionSelected = function(id) {\n\t\tvar fieldset = $('');\n\t\t$('')\n\t\t\t.appendTo(fieldset);\n\t\t$('')\n\t\t\t.appendTo(fieldset);\n\t\t\t\n\t\tfieldset.appendTo('body');\n\t\t$('select#cycling' + id).selectmenu();\n\t\t\n\t\treturn $('select#cycling' + id);\n\t};\n\t\n\tvar teardownSelectMenu = function(id) {\n\t\t$('select#cycling' + id + '').selectmenu(\"destroy\").remove();\n\t\t$('label[for=\"cycling' + id + '\"]').remove();\n\t\t$('fieldset#field' + id).remove();\n\t};\n\n\tmodule(\"selectmenu: sequential typeahead - given three items starts with 'j' and two items start with s\", {\n\t\tsetup: function() {\n\t\t\tcurrentSelectMenu = setupSelectMenu(currentTestId);\n\t\t\tcurrentSelectMenuButton = $('#cycling' + currentTestId + '-button');\t\t\t\n\t\t\tcurrentSelectMenuButton.trigger('mousedown.selectmenu');\n\t\t},\n\t\tteardown: function() {\n\t\t\t//teardownSelectMenu(currentTestId);\n\t\t\tcurrentTestId++;\n\t\t}\n\t});\n\n\ttest(\"one 'j' should select first item 
that starts with j\", function() {\n\t\tcurrentSelectMenuButton.trigger(j);\n\t\tcurrentSelectMenuButton.trigger(enter);\n\t\t\n\t\tequals(currentSelectMenu.val(), jOptions[0]);\n\t});\n\t\n\ttest(\"two 'j' should select second item that starts with j\", function() {\n\t\tcurrentSelectMenuButton.trigger(j);\n\t\tcurrentSelectMenuButton.trigger(j);\n\t\t\n\t\tequals(currentSelectMenu.val(), jOptions[1]);\n\t});\n\t\n\ttest(\"three 'j' should select third item that starts with j\", function() {\n\t\tcurrentSelectMenuButton.trigger(j);\n\t\tcurrentSelectMenuButton.trigger(j);\n\t\tcurrentSelectMenuButton.trigger(j);\n\t\t\n\t\tequals(currentSelectMenu.val(), jOptions[2])\n\t});\n\t\n\ttest(\"four 'j' should select first item that starts with j\", function() {\n\t\tcurrentSelectMenuButton.trigger(j);\n\t\tcurrentSelectMenuButton.trigger(j);\n\t\tcurrentSelectMenuButton.trigger(j);\n\t\tcurrentSelectMenuButton.trigger(j);\n\t\t\n\t\tequals(currentSelectMenu.val(), jOptions[0])\n\t});\n\t\n\ttest(\"typing 'jo' should select first item that starts with 'jo'\", function() {\n\t\tcurrentSelectMenuButton.trigger(j);\n\t\tcurrentSelectMenuButton.trigger(o);\n\t\t\n\t\tvar firstOptionThatStartsWithJO = jOptions[1];\n\t\t\n\t\tequals(currentSelectMenu.val(), firstOptionThatStartsWithJO)\n\t});\n\t\n\ttest(\"one 's' should select first item that starts with s\", function() {\t\t\n\t\tcurrentSelectMenuButton.trigger(s);\n\t\t\n\t\tequals(currentSelectMenu.val(), sOptions[0]);\n\t});\n\t\n\ttest(\"one 's' followed by two 'j' should select second item that starts with j\", function() {\t\t\n\t\tcurrentSelectMenuButton.trigger(s);\n\t\tcurrentSelectMenuButton.trigger(enter);\n\t\t\n\t\tcurrentSelectMenuButton.trigger(j);\n\t\tcurrentSelectMenuButton.trigger(j);\n\t\tcurrentSelectMenuButton.trigger(enter);\n\t\t\n\t\tequals( currentSelectMenu.val(), jOptions[1]);\n\t});\n\t\n\ttest(\"'sjs' should select first item that starts with s\", function() 
{\t\t\n\t\tcurrentSelectMenuButton.trigger(s);\t\t\n\t\tcurrentSelectMenuButton.trigger(j);\n\t\tcurrentSelectMenuButton.trigger(s);\n\t\t\n\t\tequals( currentSelectMenu.val(), sOptions[0]);\n\t});\n\t\n})(jQuery);", "ui/jquery.ui.selectmenu.js": " /*\n * jQuery UI selectmenu dev version\n *\n * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)\n * Dual licensed under the MIT (MIT-LICENSE.txt)\n * and GPL (GPL-LICENSE.txt) licenses.\n *\n * http://docs.jquery.com/UI\n * https://github.com/fnagel/jquery-ui/wiki/Selectmenu\n */\n\n(function($) {\n\n$.widget(\"ui.selectmenu\", {\n\tgetter: \"value\",\n\tversion: \"1.8\",\n\teventPrefix: \"selectmenu\",\n\toptions: {\n\t\ttransferClasses: true,\n\t\ttypeAhead: \"sequential\",\n\t\tstyle: 'dropdown',\n\t\tpositionOptions: {\n\t\t\tmy: \"left top\",\n\t\t\tat: \"left bottom\",\n\t\t\toffset: null\n\t\t},\n\t\twidth: null,\n\t\tmenuWidth: null,\n\t\thandleWidth: 26,\n\t\tmaxHeight: null,\n\t\ticons: null,\n\t\tformat: null,\n\t\tbgImage: function() {},\n\t\twrapperElement: \"\"\n\t},\n\n\t_create: function() {\n\t\tvar self = this, o = this.options;\n\n\t\t// set a default id value, generate a new random one if not set by developer\n\t\tvar selectmenuId = this.element.attr('id') || 'ui-selectmenu-' + Math.random().toString(16).slice(2, 10);\n\n\t\t// quick array of button and menu id's\n\t\tthis.ids = [ selectmenuId + '-button', selectmenuId + '-menu' ];\n\n\t\t// define safe mouseup for future toggling\n\t\tthis._safemouseup = true;\n\n\t\t// create menu button wrapper\n\t\tthis.newelement = $('')\n\t\t\t.insertAfter(this.element);\n\t\tthis.newelement.wrap(o.wrapperElement);\n\n\t\t// transfer tabindex\n\t\tvar tabindex = this.element.attr('tabindex');\n\t\tif (tabindex) {\n\t\t\tthis.newelement.attr('tabindex', tabindex);\n\t\t}\n\n\t\t// save reference to select in data for ease in calling methods\n\t\tthis.newelement.data('selectelement', this.element);\n\n\t\t// menu icon\n\t\tthis.selectmenuIcon = 
$('')\n\t\t\t.prependTo(this.newelement);\n\n\t\t// append status span to button\n\t\tthis.newelement.prepend('');\n\n\t\t// make associated form label trigger focus\n\t\t$( 'label[for=\"' + selectmenuId + '\"]' )\n\t\t\t.attr( 'for', this.ids[0] )\n\t\t\t.bind( 'click.selectmenu', function() {\n\t\t\t\tself.newelement[0].focus();\n\t\t\t\treturn false;\n\t\t\t});\n\n\t\t// click toggle for menu visibility\n\t\tthis.newelement\n\t\t\t.bind('mousedown.selectmenu', function(event) {\n\t\t\t\tself._toggle(event, true);\n\t\t\t\t// make sure a click won't open/close instantly\n\t\t\t\tif (o.style == \"popup\") {\n\t\t\t\t\tself._safemouseup = false;\n\t\t\t\t\tsetTimeout(function() { self._safemouseup = true; }, 300);\n\t\t\t\t}\n\t\t\t\treturn false;\n\t\t\t})\n\t\t\t.bind('click.selectmenu', function() {\n\t\t\t\treturn false;\n\t\t\t})\n\t\t\t.bind(\"keydown.selectmenu\", function(event) {\n\t\t\t\tvar ret = false;\n\t\t\t\tswitch (event.keyCode) {\n\t\t\t\t\tcase $.ui.keyCode.ENTER:\n\t\t\t\t\t\tret = true;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.SPACE:\n\t\t\t\t\t\tself._toggle(event);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.UP:\n\t\t\t\t\t\tif (event.altKey) {\n\t\t\t\t\t\t\tself.open(event);\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tself._moveSelection(-1);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.DOWN:\n\t\t\t\t\t\tif (event.altKey) {\n\t\t\t\t\t\t\tself.open(event);\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tself._moveSelection(1);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.LEFT:\n\t\t\t\t\t\tself._moveSelection(-1);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.RIGHT:\n\t\t\t\t\t\tself._moveSelection(1);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.TAB:\n\t\t\t\t\t\tret = true;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tdefault:\n\t\t\t\t\t\tret = true;\n\t\t\t\t}\n\t\t\t\treturn ret;\n\t\t\t})\n\t\t\t.bind('keypress.selectmenu', function(event) {\n\t\t\t\tself._typeAhead(event.which, 'mouseup');\n\t\t\t\treturn 
true;\n\t\t\t})\n\t\t\t.bind('mouseover.selectmenu focus.selectmenu', function() {\n\t\t\t\tif (!o.disabled) {\n\t\t\t\t\t$(this).addClass(self.widgetBaseClass + '-focus ui-state-hover');\n\t\t\t\t}\n\t\t\t})\n\t\t\t.bind('mouseout.selectmenu blur.selectmenu', function() {\n\t\t\t\tif (!o.disabled) {\n\t\t\t\t\t$(this).removeClass(self.widgetBaseClass + '-focus ui-state-hover');\n\t\t\t\t}\n\t\t\t});\n\n\t\t// document click closes menu\n\t\t$(document).bind(\"mousedown.selectmenu\", function(event) {\n\t\t\tself.close(event);\n\t\t});\n\n\t\t// change event on original selectmenu\n\t\tthis.element\n\t\t\t.bind(\"click.selectmenu\", function() {\n\t\t\t\tself._refreshValue();\n\t\t\t})\n\t\t\t// FIXME: newelement can be null under unclear circumstances in IE8\n\t\t\t// TODO not sure if this is still a problem (fnagel 20.03.11)\n\t\t\t.bind(\"focus.selectmenu\", function() {\n\t\t\t\tif (self.newelement) {\n\t\t\t\t\tself.newelement[0].focus();\n\t\t\t\t}\n\t\t\t});\n\n\t\t// set width when not set via options\n\t\tif (!o.width) {\n\t\t\to.width = this.element.outerWidth();\n\t\t}\n\t\t// set menu button width\n\t\tthis.newelement.width(o.width);\n\n\t\t// hide original selectmenu element\n\t\tthis.element.hide();\n\n\t\t// create menu portion, append to body\n\t\tthis.list = $('
').appendTo('body');\n\t\tthis.list.wrap(o.wrapperElement);\n\n\t\t// transfer menu click to menu button\n\t\tthis.list\n\t\t\t.bind(\"keydown.selectmenu\", function(event) {\n\t\t\t\tvar ret = false;\n\t\t\t\tswitch (event.keyCode) {\n\t\t\t\t\tcase $.ui.keyCode.UP:\n\t\t\t\t\t\tif (event.altKey) {\n\t\t\t\t\t\t\tself.close(event, true);\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tself._moveFocus(-1);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.DOWN:\n\t\t\t\t\t\tif (event.altKey) {\n\t\t\t\t\t\t\tself.close(event, true);\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tself._moveFocus(1);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.LEFT:\n\t\t\t\t\t\tself._moveFocus(-1);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.RIGHT:\n\t\t\t\t\t\tself._moveFocus(1);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.HOME:\n\t\t\t\t\t\tself._moveFocus(':first');\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.PAGE_UP:\n\t\t\t\t\t\tself._scrollPage('up');\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.PAGE_DOWN:\n\t\t\t\t\t\tself._scrollPage('down');\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.END:\n\t\t\t\t\t\tself._moveFocus(':last');\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.ENTER:\n\t\t\t\t\tcase $.ui.keyCode.SPACE:\n\t\t\t\t\t\tself.close(event, true);\n\t\t\t\t\t\t$(event.target).parents('li:eq(0)').trigger('mouseup');\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.TAB:\n\t\t\t\t\t\tret = true;\n\t\t\t\t\t\tself.close(event, true);\n\t\t\t\t\t\t$(event.target).parents('li:eq(0)').trigger('mouseup');\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase $.ui.keyCode.ESCAPE:\n\t\t\t\t\t\tself.close(event, true);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tdefault:\n\t\t\t\t\t\tret = true;\n\t\t\t\t}\n\t\t\t\treturn ret;\n\t\t\t})\n\t\t\t.bind('keypress.selectmenu', function(event) {\n\t\t\t\tself._typeAhead(event.which, 'focus');\n\t\t\t\treturn true;\n\t\t\t})\n\t\t\t// this allows for using the scrollbar in an overflowed list\n\t\t\t.bind( 'mousedown.selectmenu 
mouseup.selectmenu', function() { return false; });\n\n\n\t\t// needed when window is resized\n\t\t$(window).bind( \"resize.selectmenu\", $.proxy( self._refreshPosition, this ) );\n\t},\n\n\t_init: function() {\n\t\tvar self = this, o = this.options;\n\n\t\t// serialize selectmenu element options\n\t\tvar selectOptionData = [];\n\t\tthis.element\n\t\t\t.find('option')\n\t\t\t.each(function() {\n\t\t\t\tvar opt = $(this);\n\t\t\t\tselectOptionData.push({\n\t\t\t\t\tvalue: opt.attr('value'),\n\t\t\t\t\ttext: self._formatText($(this).text()),\n\t\t\t\t\tselected: opt.attr('selected'),\n\t\t\t\t\tdisabled: opt.attr('disabled'),\n\t\t\t\t\tclasses: opt.attr('class'),\n\t\t\t\t\ttypeahead: opt.attr('typeahead'),\n\t\t\t\t\tparentOptGroup: opt.parent('optgroup'),\n\t\t\t\t\tbgImage: o.bgImage.call(opt)\n\t\t\t\t});\n\t\t\t});\n\n\t\t// active state class is only used in popup style\n\t\tvar activeClass = (self.options.style == \"popup\") ? \" ui-state-active\" : \"\";\n\n\t\t// empty list so we can refresh the selectmenu via selectmenu()\n\t\tthis.list.html(\"\");\n\n\t\t// write li's\n\t\tif (selectOptionData.length) {\n\t\t\tfor (var i = 0; i < selectOptionData.length; i++) {\n\t\t\t\tvar thisLi = $('
').appendTo(this.list);\n\t\t}\n\t\t// we need to set and unset the CSS classes for dropdown and popup style\n\t\tvar isDropDown = ( o.style == 'dropdown' );\n\t\tthis.newelement\n\t\t\t.toggleClass( self.widgetBaseClass + '-dropdown', isDropDown )\n\t\t\t.toggleClass( self.widgetBaseClass + '-popup', !isDropDown );\n\t\tthis.list\n\t\t\t.toggleClass( self.widgetBaseClass + '-menu-dropdown ui-corner-bottom', isDropDown )\n\t\t\t.toggleClass( self.widgetBaseClass + '-menu-popup ui-corner-all', !isDropDown )\n\t\t\t// add corners to top and bottom menu items\n\t\t\t.find( 'li:first' )\n\t\t\t.toggleClass( 'ui-corner-top', !isDropDown )\n\t\t\t.end().find( 'li:last' )\n\t\t\t.addClass( 'ui-corner-bottom' );\n\t\tthis.selectmenuIcon\n\t\t\t.toggleClass( 'ui-icon-triangle-1-s', isDropDown )\n\t\t\t.toggleClass( 'ui-icon-triangle-2-n-s', !isDropDown );\n\n\t\t// transfer classes to selectmenu and list\n\t\tif ( o.transferClasses ) {\n\t\t\tvar transferClasses = this.element.attr( 'class' ) || '';\n\t\t\tthis.newelement.add( this.list ).addClass( transferClasses );\n\t\t}\n\n\t\t// set menu width to either menuWidth option value, width option value, or select width\n\t\tif ( o.style == 'dropdown' ) {\n\t\t\tthis.list.width( o.menuWidth ? o.menuWidth : o.width );\n\t\t} else {\n\t\t\tthis.list.width( o.menuWidth ? o.menuWidth : o.width - o.handleWidth );\n\t\t}\n\t\to.minMenuWidth && this.list.width() < o.minMenuWidth && this.list.width(o.minMenuWidth);\n\n\t\t// reset height to auto\n\t\tthis.list.css(\"height\", \"auto\");\n\t\tvar listH = this.list.height();\n\t\t// calculate default max height\n\t\tif ( o.maxHeight && o.maxHeight < listH) {\n\t\t\tthis.list.height( o.maxHeight );\n\t\t} else {\n\t\t\tvar winH = $( window ).height() / 3;\n\t\t\tif ( winH < listH ) this.list.height( winH );\t\n\t\t}\n\t\t\n\t\t// save reference to actionable li's (not group label li's)\n\t\tthis._optionLis = this.list.find( 'li:not(.' 
+ self.widgetBaseClass + '-group)' );\n\n\t\t// transfer disabled state\n\t\tif ( this.element.attr( 'disabled' ) === true ) {\n\t\t\tthis.disable();\n\t\t} else {\n\t\t\tthis.enable()\n\t\t}\n\t\t\n\t\t// update value\n\t\tthis.index( this._selectedIndex() );\n\n\t\t// needed when selectmenu is placed at the very bottom / top of the page\n\t\twindow.setTimeout( function() {\n\t\t\tself._refreshPosition();\n\t\t}, 200 );\n\t},\n\n\tdestroy: function() {\n\t\tthis.element.removeData( this.widgetName )\n\t\t\t.removeClass( this.widgetBaseClass + '-disabled' + ' ' + this.namespace + '-state-disabled' )\n\t\t\t.removeAttr( 'aria-disabled' )\n\t\t\t.unbind( \".selectmenu\" );\n\n\t\t$( window ).unbind( \".selectmenu\" );\n\t\t$( document ).unbind( \".selectmenu\" );\n\n\t\t// unbind click on label, reset its for attr\n\t\t$( 'label[for=' + this.newelement.attr('id') + ']' )\n\t\t\t.attr( 'for', this.element.attr( 'id' ) )\n\t\t\t.unbind( '.selectmenu' );\n\n\t\tif ( this.options.wrapperElement ) {\n\t\t\tthis.newelement.find( this.options.wrapperElement ).remove();\n\t\t\tthis.list.find( this.options.wrapperElement ).remove();\n\t\t} else {\n\t\t\tthis.newelement.remove();\n\t\t\tthis.list.remove();\n\t\t}\n\t\tthis.element.show();\n\n\t\t// call widget destroy function\n\t\t$.Widget.prototype.destroy.apply(this, arguments);\n\t},\n\n\t_typeAhead: function(code, eventType){\n\t\tvar self = this, focusFound = false, C = String.fromCharCode(code).toUpperCase(), c = C.toLowerCase();\n\n\t\tif (self.options.typeAhead == 'sequential') {\n\t\t\tvar currentFocusedPosition = {\n\t\t\t\tget: function() {\n\t\t\t\t\tif (typeof(self._currentFocusedPosition) === 'undefined') {\n\t\t\t\t\t\tself._currentFocusedPosition = {};\n\t\t\t\t\t}\n\t\t\t\t\treturn self._currentFocusedPosition;\n\t\t\t\t},\n\t\t\t\treset:function() {\n\t\t\t\t\tself._currentFocusedPosition = {};\n\t\t\t\t}\n\t\t\t};\n\t\t\t\n\t\t\tvar prevChars = {\n\t\t\t\tisEmpty: function() {\n\t\t\t\t\treturn 
typeof(self._prevChar) === 'undefined';\n\t\t\t\t},\n\t\t\t\tlastCharIsNotEqualTo: function(character) {\n\t\t\t\t\treturn (self._prevChar[self._prevChar.length - 1] !== character.toLowerCase() && \n\t\t\t\t\t\tself._prevChar[self._prevChar.length - 1] !== character.toUpperCase());\n\t\t\t\t},\n\t\t\t\ttoString: function() {\n\t\t\t\t\treturn typeof(self._prevChar) === 'undefined' ? \n\t\t\t\t\t\t'' : \n\t\t\t\t\t\tself._prevChar.join('');\n\t\t\t\t},\n\t\t\t\treset: function() {\n\t\t\t\t\tself._prevChar = undefined;\n\t\t\t\t},\n\t\t\t\tadd: function(c) {\n\t\t\t\t\ttypeof(self._prevChar) === 'undefined' ? \n\t\t\t\t\t\tself._prevChar = [c] : \n\t\t\t\t\t\tself._prevChar[self._prevChar.length] = c;\n\t\t\t\t}\t\t\t\n\t\t\t};\t\t\n\n\t\t\tvar clearPrevCharTimeout = {\n\t\t\t\tget: function() {\n\t\t\t\t\treturn self._clearPrevChar;\n\t\t\t\t},\n\t\t\t\tset: function(value) {\n\t\t\t\t\tself._clearPrevChar = value;\n\t\t\t\t}, \n\t\t\t\texists: function() {\n\t\t\t\t\treturn typeof(self._clearPrevChar) !== 'undefined';\n\t\t\t\t},\n\t\t\t\treset: function() {\n\t\t\t\t\tself._clearPrevChar = undefined;\n\t\t\t\t}\n\t\t\t};\t\t\t\n\n\t\t\t\n\t\t\tfunction startWithSpecification(text, pattern) {\n\t\t\t\treturn (text.toLowerCase().indexOf( pattern.toLowerCase() ) === 0);\n\t\t\t}\n\t\t\t\n\t\t\tfunction lessThanCurrentFocusedPositionSpecification(index, character) {\n\t\t\t\treturn index <= currentFocusedPosition.get()[c];\n\t\t\t}\n\t\t\t\n\t\t\tfunction focusOptSeq(elem){\n\t\t\t\t$(elem).trigger(eventType);\n\t\t\t}\n\t\t\t\n\t\t\tfunction matchIsFound(character, indexOfMatch) {\n\t\t\t\tprevChars.add(character);\t\t\t\t\t\t\t\t\n\t\t\t\tcurrentFocusedPosition.get()[character.toLowerCase()] = indexOfMatch;\n\t\t\t}\n\t\t\t\n\t\t\t\t\t\t\n\t\t\tfunction matchFirstCharFromTheBeginningOfList(listToSearch) {\n\t\t\t\tmatchFirstLetter.call(this, listToSearch);\n\t\t\t}\n\n\t\t\tfunction matchFirstLetter(listToSearch) {\t\t\t\t\n\t\t\t\treturn 
searchListForCharSuffixedby.apply(this, [listToSearch, '']);\n\t\t\t}\n\t\t\t\n\t\t\tfunction matchByString(listToSearch, pattern) {\t\t\t\t\n\t\t\t\treturn searchListForCharSuffixedby.apply(this, [listToSearch, pattern]);\n\t\t\t}\t\t\n\t\t\t\n\t\t\tfunction matchByFirstCharFromCurrentFocusedPosition(listToSearch) {\n\t\t\t\treturn searchListForCharSuffixedby.apply(this, \n\t\t\t\t\t[listToSearch, '', lessThanCurrentFocusedPositionSpecification]);\n\t\t\t}\n\t\t\t\n\t\t\tfunction searchListForCharSuffixedby(listToSearch, pattern, preCondition) {\t\t\t\n\t\t\t\tvar found = false;\t\n\t\t\t\t\n\t\t\t\tlistToSearch.each(function(i) {\n\t\t\t\t\tif (!found) {\n\t\t\t\t\t\tif (preCondition && preCondition(i, c)) {\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\t\t\t\t\t\n\t\t\t\t\t\t// allow the typeahead attribute on the option tag for a more specific lookup\n\t\t\t\t\t\tvar thisText = $(this).attr('typeahead') || $(this).text();\n\t\t\t\t\t\t\n\t\t\t\t\t\t// allow the typeahead attribute on the option tag for a more specific lookup\n\t\t\t\t\t\tif ( !startWithSpecification(thisText, (pattern || '') + c) ) {\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\t\t\t\t\t\t\n\t\t\t\t\t\tmatchIsFound(c, i);\n\t\t\t\t\t\tfocusOptSeq(this);\n\t\t\t\t\t\tfound = true;\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\t\n\t\t\t\treturn found;\n\t\t\t}\t\n\t\t\t\n\t\t\tfunction doSearch() {\n\t\t\t\tvar listToSearch = this.list.find('li a');\n\t\t\t\t\n\t\t\t\t// for the 1st letter, should select the first item which has the same first letter\n\t\t\t\tif ( prevChars.isEmpty() ) {\n\t\t\t\t\tmatchFirstLetter.call(this, listToSearch);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t\t\n\t\t\t\t// try to match by string first\n\t\t\t\tif ( matchByString.call(this, listToSearch, prevChars.toString()) ) {\n\t\t\t\t\treturn;\n\t\t\t\t}\t\t\t\t\t\n\t\t\t\t\n\t\t\t\t// if previous char is different from current char, flush the currentFocusedPosition\n\t\t\t\tif ( prevChars.lastCharIsNotEqualTo(c) ) 
{\n\t\t\t\t\tcurrentFocusedPosition.reset();\n\t\t\t\t}\n\t\t\t\t\n\t\t\t\t// try to match by the first letter starting from current position\n\t\t\t\tif ( matchByFirstCharFromCurrentFocusedPosition.call(this, listToSearch) ) {\t\t\t\t\n\t\t\t\t\treturn;\n\t\t\t\t}\t\t\t\t\n\t\t\t\t\n\t\t\t\t// lastly, try to rematch from the beginning again\n\t\t\t\tmatchFirstCharFromTheBeginningOfList.call(this, listToSearch);\n\t\t\t}\n\t\t\t\n\t\t\tfunction clearPreviousTimeouts() {\n\t\t\t\t// TODO: it's not very clear what this is needed for?\n\t\t\t\t// clear the timeout so we can use _prevChar\n\t\t\t\twindow.clearTimeout('ui.selectmenu-' + self.selectmenuId);\n\t\t\t\t\n\t\t\t\tif ( clearPrevCharTimeout.exists() ) {\n\t\t\t\t\twindow.clearTimeout( clearPrevCharTimeout.get() );\n\t\t\t\t\tclearPrevCharTimeout.reset();\n\t\t\t\t}\n\t\t\t}\n\t\t\t\n\t\t\tfunction setTimeoutToClear() {\n\t\t\t\t// set a 1 second timeout for sequential typeahead\n\t\t\t\t// keep this set even if we have no matches so it doesnt typeahead somewhere else\t\n\t\t\t\tclearPrevCharTimeout.set(\n\t\t\t\t\twindow.setTimeout(function(el) {\n\t\t\t\t\t\tprevChars.reset();\n\t\t\t\t\t\tcurrentFocusedPosition.reset();\n\t\t\t\t\t}, 1000, self));\n\t\t\t}\n\t\t\t\n\t\t\tclearPreviousTimeouts();\n\t\t\tdoSearch.call(this);\n\t\t\tsetTimeoutToClear();\n\t\t\t\n\t\t} else {\n\t\t\t//define self._prevChar if needed\n\t\t\tif (!self._prevChar){ self._prevChar = ['',0]; }\n\n\t\t\tvar focusFound = false;\n\t\t\tfunction focusOpt(elem, ind){\n\t\t\t\tfocusFound = true;\n\t\t\t\t$(elem).trigger(eventType);\n\t\t\t\tself._prevChar[1] = ind;\n\t\t\t}\n\t\t\tthis.list.find('li a').each(function(i){\n\t\t\t\tif(!focusFound){\n\t\t\t\t\tvar thisText = $(this).text();\n\t\t\t\t\tif( thisText.indexOf(C) == 0 || thisText.indexOf(c) == 0){\n\t\t\t\t\t\t\tif(self._prevChar[0] == C){\n\t\t\t\t\t\t\t\tif(self._prevChar[1] < i){ focusOpt(this,i); }\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\telse{ focusOpt(this,i); 
}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t});\n\t\t\tthis._prevChar[0] = C;\n\t\t}\n\t},\n\n\t// returns some usefull information, called by callbacks only\n\t_uiHash: function() {\n\t\tvar index = this.index();\n\t\treturn {\n\t\t\tindex: index,\n\t\t\toption: $(\"option\", this.element).get(index),\n\t\t\tvalue: this.element[0].value\n\t\t};\n\t},\n\n\topen: function(event) {\n\t\tvar self = this;\n\t\tif ( this.newelement.attr(\"aria-disabled\") != 'true' ) {\n\t\t\tthis._closeOthers(event);\n\t\t\tthis.newelement\n\t\t\t\t.addClass('ui-state-active');\n\t\t\tif (self.options.wrapperElement) {\n\t\t\t\tthis.list.parent().appendTo('body');\n\t\t\t} else {\n\t\t\t\tthis.list.appendTo('body');\n\t\t\t}\n\n\t\t\tthis.list.addClass(self.widgetBaseClass + '-open')\n\t\t\t\t.attr('aria-hidden', false);\n\t\t\t\n\t\t\tselected = this.list.find('li:not(.' + self.widgetBaseClass + '-group):eq(' + this._selectedIndex() + ') a');\n\t\t\tif (selected.length) selected[0].focus();\n\t\t\t\n\t\t\tif ( this.options.style == \"dropdown\" ) {\n\t\t\t\tthis.newelement.removeClass('ui-corner-all').addClass('ui-corner-top');\n\t\t\t}\n\t\t\t\n\t\t\tthis._refreshPosition();\n\t\t\tthis._trigger(\"open\", event, this._uiHash());\n\t\t}\n\t},\n\n\tclose: function(event, retainFocus) {\n\t\tif ( this.newelement.is('.ui-state-active') ) {\n\t\t\tthis.newelement\n\t\t\t\t.removeClass('ui-state-active');\n\t\t\tthis.list\n\t\t\t\t.attr('aria-hidden', true)\n\t\t\t\t.removeClass(this.widgetBaseClass + '-open');\n\t\t\tif ( this.options.style == \"dropdown\" ) {\n\t\t\t\tthis.newelement.removeClass('ui-corner-top').addClass('ui-corner-all');\n\t\t\t}\n\t\t\tif ( retainFocus ) {\n\t\t\t\tthis.newelement.focus();\n\t\t\t}\n\t\t\tthis._trigger(\"close\", event, this._uiHash());\n\t\t}\n\t},\n\n\tchange: function(event) {\n\t\tthis.element.trigger(\"change\");\n\t\tthis._trigger(\"change\", event, this._uiHash());\n\t},\n\n\tselect: function(event) {\n\t\tif (this._disabled(event.currentTarget)) { return 
false; }\n\t\tthis._trigger(\"select\", event, this._uiHash());\n\t},\n\n\t_closeOthers: function(event) {\n\t\t$('.' + this.widgetBaseClass + '.ui-state-active').not(this.newelement).each(function() {\n\t\t\t$(this).data('selectelement').selectmenu('close', event);\n\t\t});\n\t\t$('.' + this.widgetBaseClass + '.ui-state-hover').trigger('mouseout');\n\t},\n\n\t_toggle: function(event, retainFocus) {\n\t\tif ( this.list.is('.' + this.widgetBaseClass + '-open') ) {\n\t\t\tthis.close(event, retainFocus);\n\t\t} else {\n\t\t\tthis.open(event);\n\t\t}\n\t},\n\n\t_formatText: function(text) {\n\t\treturn (this.options.format ? this.options.format(text) : text);\n\t},\n\n\t_selectedIndex: function() {\n\t\treturn this.element[0].selectedIndex;\n\t},\n\n\t_selectedOptionLi: function() {\n\t\treturn this._optionLis.eq(this._selectedIndex());\n\t},\n\n\t_focusedOptionLi: function() {\n\t\treturn this.list.find('.' + this.widgetBaseClass + '-item-focus');\n\t},\n\n\t_moveSelection: function(amt, recIndex) {\n\t\t// do nothing if disabled\n\t\tif (!this.options.disabled) {\n\t\t\tvar currIndex = parseInt(this._selectedOptionLi().data('index') || 0, 10);\n\t\t\tvar newIndex = currIndex + amt;\n\t\t\t// do not loop when using up key\n\n\t\t\tif (newIndex < 0) {\n\t\t\t\tnewIndex = 0;\n\t\t\t}\n\t\t\tif (newIndex > this._optionLis.size() - 1) {\n\t\t\t\tnewIndex = this._optionLis.size() - 1;\n\t\t\t}\n\t\t\t// Occurs when a full loop has been made\n\t\t\tif (newIndex === recIndex) { return false; }\n\n\t\t\tif (this._optionLis.eq(newIndex).hasClass( this.namespace + '-state-disabled' )) {\n\t\t\t\t// if option at newIndex is disabled, call _moveFocus, incrementing amt by one\n\t\t\t\t(amt > 0) ? 
++amt : --amt;\n\t\t\t\tthis._moveSelection(amt, newIndex);\n\t\t\t} else {\n\t\t\t\treturn this._optionLis.eq(newIndex).trigger('mouseup');\n\t\t\t}\n\t\t}\n\t},\n\n\t_moveFocus: function(amt, recIndex) {\n\t\tif (!isNaN(amt)) {\n\t\t\tvar currIndex = parseInt(this._focusedOptionLi().data('index') || 0, 10);\n\t\t\tvar newIndex = currIndex + amt;\n\t\t}\n\t\telse {\n\t\t\tvar newIndex = parseInt(this._optionLis.filter(amt).data('index'), 10);\n\t\t}\n\n\t\tif (newIndex < 0) {\n\t\t\tnewIndex = 0;\n\t\t}\n\t\tif (newIndex > this._optionLis.size() - 1) {\n\t\t\tnewIndex = this._optionLis.size() - 1;\n\t\t}\n\n\t\t//Occurs when a full loop has been made\n\t\tif (newIndex === recIndex) { return false; }\n\n\t\tvar activeID = this.widgetBaseClass + '-item-' + Math.round(Math.random() * 1000);\n\n\t\tthis._focusedOptionLi().find('a:eq(0)').attr('id', '');\n\n\t\tif (this._optionLis.eq(newIndex).hasClass( this.namespace + '-state-disabled' )) {\n\t\t\t// if option at newIndex is disabled, call _moveFocus, incrementing amt by one\n\t\t\t(amt > 0) ? ++amt : --amt;\n\t\t\tthis._moveFocus(amt, newIndex);\n\t\t} else {\n\t\t\tthis._optionLis.eq(newIndex).find('a:eq(0)').attr('id',activeID).focus();\n\t\t}\n\n\t\tthis.list.attr('aria-activedescendant', activeID);\n\t},\n\n\t_scrollPage: function(direction) {\n\t\tvar numPerPage = Math.floor(this.list.outerHeight() / this.list.find('li:first').outerHeight());\n\t\tnumPerPage = (direction == 'up' ? -numPerPage : numPerPage);\n\t\tthis._moveFocus(numPerPage);\n\t},\n\n\t_setOption: function(key, value) {\n\t\tthis.options[key] = value;\n\t\t// set\n\t\tif (key == 'disabled') {\n\t\t\tthis.close();\n\t\t\tthis.element\n\t\t\t\t.add(this.newelement)\n\t\t\t\t.add(this.list)[value ? 
'addClass' : 'removeClass'](\n\t\t\t\t\tthis.widgetBaseClass + '-disabled' + ' ' +\n\t\t\t\t\tthis.namespace + '-state-disabled')\n\t\t\t\t.attr(\"aria-disabled\", value);\n\t\t}\n\t},\n\n\tdisable: function(index, type){\n\t\t\t// if options is not provided, call the parents disable function\n\t\t\tif ( typeof( index ) == 'undefined' ) {\n\t\t\t\tthis._setOption( 'disabled', true );\n\t\t\t} else {\n\t\t\t\tif ( type == \"optgroup\" ) {\n\t\t\t\t\tthis._disableOptgroup(index);\n\t\t\t\t} else {\n\t\t\t\t\tthis._disableOption(index);\n\t\t\t\t}\n\t\t\t}\n\t},\n\n\tenable: function(index, type) {\n\t\t\t// if options is not provided, call the parents enable function\n\t\t\tif ( typeof( index ) == 'undefined' ) {\n\t\t\t\tthis._setOption('disabled', false);\n\t\t\t} else {\n\t\t\t\tif ( type == \"optgroup\" ) {\n\t\t\t\t\tthis._enableOptgroup(index);\n\t\t\t\t} else {\n\t\t\t\t\tthis._enableOption(index);\n\t\t\t\t}\n\t\t\t}\n\t},\n\n\t_disabled: function(elem) {\n\t\t\treturn $(elem).hasClass( this.namespace + '-state-disabled' );\n\t},\n\n\n\t_disableOption: function(index) {\n\t\t\tvar optionElem = this._optionLis.eq(index);\n\t\t\tif (optionElem) {\n\t\t\t\toptionElem.addClass(this.namespace + '-state-disabled')\n\t\t\t\t\t.find(\"a\").attr(\"aria-disabled\", true);\n\t\t\t\tthis.element.find(\"option\").eq(index).attr(\"disabled\", \"disabled\");\n\t\t\t}\n\t},\n\n\t_enableOption: function(index) {\n\t\t\tvar optionElem = this._optionLis.eq(index);\n\t\t\tif (optionElem) {\n\t\t\t\toptionElem.removeClass( this.namespace + '-state-disabled' )\n\t\t\t\t\t.find(\"a\").attr(\"aria-disabled\", false);\n\t\t\t\tthis.element.find(\"option\").eq(index).removeAttr(\"disabled\");\n\t\t\t}\n\t},\n\n\t_disableOptgroup: function(index) {\n\t\t\tvar optGroupElem = this.list.find( 'li.' 
+ this.widgetBaseClass + '-group-' + index );\n\t\t\tif (optGroupElem) {\n\t\t\t\toptGroupElem.addClass(this.namespace + '-state-disabled')\n\t\t\t\t\t.attr(\"aria-disabled\", true);\n\t\t\t\tthis.element.find(\"optgroup\").eq(index).attr(\"disabled\", \"disabled\");\n\t\t\t}\n\t},\n\n\t_enableOptgroup: function(index) {\n\t\t\tvar optGroupElem = this.list.find( 'li.' + this.widgetBaseClass + '-group-' + index );\n\t\t\tif (optGroupElem) {\n\t\t\t\toptGroupElem.removeClass(this.namespace + '-state-disabled')\n\t\t\t\t\t.attr(\"aria-disabled\", false);\n\t\t\t\tthis.element.find(\"optgroup\").eq(index).removeAttr(\"disabled\");\n\t\t\t}\n\t},\n\n\tindex: function(newValue) {\n\t\tif (arguments.length) {\n\t\t\tif (!this._disabled($(this._optionLis[newValue]))) {\n\t\t\t\tthis.element[0].selectedIndex = newValue;\n\t\t\t\tthis._refreshValue();\n\t\t\t} else {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t} else {\n\t\t\treturn this._selectedIndex();\n\t\t}\n\t},\n\n\tvalue: function(newValue) {\n\t\tif (arguments.length) {\n\t\t\tthis.element[0].value = newValue;\n\t\t\tthis._refreshValue();\n\t\t} else {\n\t\t\treturn this.element[0].value;\n\t\t}\n\t},\n\n\t_refreshValue: function() {\n\t\tvar activeClass = (this.options.style == \"popup\") ? \" ui-state-active\" : \"\";\n\t\tvar activeID = this.widgetBaseClass + '-item-' + Math.round(Math.random() * 1000);\n\t\t// deselect previous\n\t\tthis.list\n\t\t\t.find('.' + this.widgetBaseClass + '-item-selected')\n\t\t\t.removeClass(this.widgetBaseClass + \"-item-selected\" + activeClass)\n\t\t\t.find('a')\n\t\t\t.attr('aria-selected', 'false')\n\t\t\t.attr('id', '');\n\t\t// select new\n\t\tthis._selectedOptionLi()\n\t\t\t.addClass(this.widgetBaseClass + \"-item-selected\" + activeClass)\n\t\t\t.find('a')\n\t\t\t.attr('aria-selected', 'true')\n\t\t\t.attr('id', activeID);\n\n\t\t// toggle any class brought in from option\n\t\tvar currentOptionClasses = (this.newelement.data('optionClasses') ? 
this.newelement.data('optionClasses') : \"\");\n\t\tvar newOptionClasses = (this._selectedOptionLi().data('optionClasses') ? this._selectedOptionLi().data('optionClasses') : \"\");\n\t\tthis.newelement\n\t\t\t.removeClass(currentOptionClasses)\n\t\t\t.data('optionClasses', newOptionClasses)\n\t\t\t.addClass( newOptionClasses )\n\t\t\t.find('.' + this.widgetBaseClass + '-status')\n\t\t\t.html(\n\t\t\t\tthis._selectedOptionLi()\n\t\t\t\t\t.find('a:eq(0)')\n\t\t\t\t\t.html()\n\t\t\t);\n\n\t\tthis.list.attr('aria-activedescendant', activeID);\n\t},\n\n\t_refreshPosition: function() {\n\t\tvar o = this.options;\n\t\t// if its a native pop-up we need to calculate the position of the selected li\n\t\tif (o.style == \"popup\" && !o.positionOptions.offset) {\n\t\t\tvar selected = this._selectedOptionLi();\n\t\t\tvar _offset = \"0 -\" + (selected.outerHeight() + selected.offset().top - this.list.offset().top);\n\t\t}\n\t\t// update zIndex if jQuery UI is able to process\n\t\tvar zIndexElement = this.element.zIndex();\n\t\tif (zIndexElement) {\n\t\t\tthis.list.css({\n\t\t\t\tzIndex: zIndexElement\n\t\t\t});\n\t\t}\n\t\tthis.list.position({\n\t\t\t\t// set options for position plugin\n\t\t\t\tof: o.positionOptions.of || this.newelement,\n\t\t\t\tmy: o.positionOptions.my,\n\t\t\t\tat: o.positionOptions.at,\n\t\t\t\toffset: o.positionOptions.offset || _offset,\n\t\t\t\tcollision: o.positionOptions.collision || 'flip'\n\t\t\t});\n\t}\n});\n\n})(jQuery);\n"}}
-{"repo": "Trepan-Debuggers/remake", "pr_number": 149, "title": "Enable --debugger-stop with -X", "state": "closed", "merged_at": "2023-08-28T03:36:30Z", "additions": 38, "deletions": 39, "files_changed": ["src/main.c"], "files_before": {"src/main.c": "/* Argument parsing and main program of GNU Make.\nCopyright (C) 1988-2020 Free Software Foundation, Inc.\nCopyright (C) 2015, 2017 Rocky Bernstein\nThis file is part of GNU Make.\n\nGNU Make is free software; you can redistribute it and/or modify it under the\nterms of the GNU General Public License as published by the Free Software\nFoundation; either version 3 of the License, or (at your option) any later\nversion.\n\nGNU Make is distributed in the hope that it will be useful, but WITHOUT ANY\nWARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR\nA PARTICULAR PURPOSE. See the GNU General Public License for more details.\n\nYou should have received a copy of the GNU General Public License along with\nthis program. If not, see . */\n\n#include \"makeint.h\"\n#include \"globals.h\"\n#include \"profile.h\"\n#include \"os.h\"\n#include \"filedef.h\"\n#include \"dep.h\"\n#include \"variable.h\"\n#include \"job.h\"\n#include \"commands.h\"\n#include \"rule.h\"\n#include \"debug.h\"\n#include \"getopt.h\"\n// debugger include(s)\n#include \"cmd.h\"\n\n#include \n#ifdef WINDOWS32\n# include \n# include \n#ifdef HAVE_STRINGS_H\n# include \t/* for strcasecmp */\n#endif\n# include \"pathstuff.h\"\n# include \"sub_proc.h\"\n# include \"w32err.h\"\n#endif\n#ifdef HAVE_FCNTL_H\n# include \n#endif\n\nstruct goaldep *read_makefiles;\n\nextern void initialize_stopchar_map ();\n\n#if defined HAVE_WAITPID || defined HAVE_WAIT3\n# define HAVE_WAIT_NOHANG\n#endif\n\n#ifndef HAVE_UNISTD_H\nint chdir ();\n#endif\n#ifndef STDC_HEADERS\n# ifndef sun /* Sun has an incorrect decl in a header. 
*/\nvoid exit (int) NORETURN;\n# endif\ndouble atof ();\n#endif\n\nstatic void clean_jobserver (int status);\nstatic void print_data_base (void);\nvoid print_rule_data_base (bool b_verbose);\nstatic void print_version (void);\nstatic void decode_switches (int argc, const char **argv, int env);\nstatic void decode_env_switches (const char *envar, size_t len);\nstatic struct variable *define_makeflags (int all, int makefile);\nstatic char *quote_for_env (char *out, const char *in);\nstatic void initialize_global_hash_tables (void);\n\n\f\n/* The structure that describes an accepted command switch. */\n\nstruct command_switch\n {\n int c; /* The switch character. */\n\n enum /* Type of the value. */\n {\n flag, /* Turn int flag on. */\n flag_off, /* Turn int flag off. */\n string, /* One string per invocation. */\n strlist, /* One string per switch. */\n filename, /* A string containing a file name. */\n positive_int, /* A positive integer. */\n floating, /* A floating-point number (double). */\n ignore /* Ignored. */\n } type;\n\n void *value_ptr; /* Pointer to the value-holding variable. */\n\n unsigned int env:1; /* Can come from MAKEFLAGS. */\n unsigned int toenv:1; /* Should be put in MAKEFLAGS. */\n unsigned int no_makefile:1; /* Don't propagate when remaking makefiles. */\n\n const void *noarg_value; /* Pointer to value used if no arg given. */\n const void *default_value; /* Pointer to default value. */\n\n const char *long_name; /* Long option name. */\n };\n\n/* True if C is a switch value that corresponds to a short option. */\n\n#define short_option(c) ((c) <= CHAR_MAX)\n\n/* The structure used to hold the list of strings given\n in command switches of a type that takes strlist arguments. */\n\n/* The recognized command switches. */\n\nstatic const int default_silent_flag = 0;\n\n/* Nonzero means either -s was given, or .SILENT-with-no-deps was seen. */\n\nint run_silent = 0;\n\n/*! 
If non-null, contains the type of tracing we are to do.\n This is coordinated with tracing_flag. */\nstringlist_t *tracing_opts = NULL;\n\n/*! If true, show version information on entry. */\nbool b_show_version = false;\n\n/*! If true, go into debugger on error.\nSets --debugger --debugger-stop=error. */\nint post_mortem_flag = 0;\n\n/*! Nonzero means use GNU readline in the debugger. */\nint use_readline_flag =\n#ifdef HAVE_LIBREADLINE\n 1\n#else\n 0\n#endif\n ;\n\n/*! If nonzero, the basename of filenames is in giving locations. Normally,\n giving a file directory location helps a debugger frontend\n when we change directories. For regression tests it is helpful to\n list just the basename part as that doesn't change from installation\n to installation. Users may have their preferences too.\n*/\nint basename_filenames = 0;\n\n/* Synchronize output (--output-sync). */\n\nchar *output_sync_option = 0;\n\n/* Specify profile output formatting (--profile) */\n\nchar *profile_option = 0;\n\n/* Specify the output directory for profiling information */\n\nstatic struct stringlist *profile_dir_opt = 0;\n\n/* Output level (--verbosity). */\n\nstatic struct stringlist *verbosity_opts;\n\n/* Environment variables override makefile definitions. */\n\n/* Nonzero means keep going even if remaking some file fails (-k). */\n\nint keep_going_flag;\nstatic const int default_keep_going_flag = 0;\n\n/*! Nonzero gives a list of explicit target names that have commands\n AND comments associated with them and exits. Set by option --task-comments\n */\n\nint show_task_comments_flag = 0;\n\n/* Nonzero means ignore print_directory and never print the directory.\n This is necessary because print_directory is set implicitly. */\n\nint inhibit_print_directory = 0;\n\n/* List of makefiles given with -f switches. */\n\nstatic struct stringlist *makefiles = 0;\n\n/* Size of the stack when we started. 
*/\n\n#ifdef SET_STACK_SIZE\nstruct rlimit stack_limit;\n#endif\n\n\n/* Number of job slots for parallelism. */\n\nunsigned int job_slots;\n\n#define INVALID_JOB_SLOTS (-1)\nstatic unsigned int master_job_slots = 0;\nstatic int arg_job_slots = INVALID_JOB_SLOTS;\n\nstatic const int default_job_slots = INVALID_JOB_SLOTS;\n\n/* Value of job_slots that means no limit. */\n\nstatic const int inf_jobs = 0;\n\n/* Authorization for the jobserver. */\n\nstatic char *jobserver_auth = NULL;\n\n/* Handle for the mutex used on Windows to synchronize output of our\n children under -O. */\n\nchar *sync_mutex = NULL;\n\n/* Maximum load average at which multiple jobs will be run.\n Negative values mean unlimited, while zero means limit to\n zero load (which could be useful to start infinite jobs remotely\n but one at a time locally). */\ndouble max_load_average = -1.0;\ndouble default_load_average = -1.0;\n\n/* List of directories given with -C switches. */\n\nstatic struct stringlist *directories = 0;\n\n/* List of include directories given with -I switches. */\n\nstatic struct stringlist *include_directories = 0;\n\n/* List of files given with -o switches. */\n\nstatic struct stringlist *old_files = 0;\n\n/* List of files given with -W switches. */\n\nstatic struct stringlist *new_files = 0;\n\n/* List of strings to be eval'd. */\nstatic struct stringlist *eval_strings = 0;\n\n/* If nonzero, we should just print usage and exit. */\n\nstatic int print_usage_flag = 0;\n\n/*! 
Do we want to go into a debugger or not?\n Values are \"error\" - enter on errors or fatal errors\n \"fatal\" - enter on fatal errors\n \"goal\" - set to enter debugger before updating goal\n \"preread\" - set to enter debugger before reading makefile(s)\n \"preaction\" - set to enter debugger before performing any\n actions(s)\n \"full\" - \"enter\" + \"error\" + \"fatal\"\n*/\nstatic stringlist_t* debugger_opts = NULL;\n\n/* If nonzero, always build all targets, regardless of whether\n they appear out of date or not. */\nstatic int always_make_set = 0;\nint always_make_flag = 0;\n\n/* If nonzero, we're in the \"try to rebuild makefiles\" phase. */\n\nint rebuilding_makefiles = 0;\n\n\f\n/* The usage output. We write it this way to make life easier for the\n translators, especially those trying to translate to right-to-left\n languages like Hebrew. */\n\nstatic const char *const usage[] =\n {\n N_(\"Options:\\n\"),\n N_(\"\\\n -b, -m Ignored for compatibility.\\n\"),\n N_(\"\\\n -B, --always-make Unconditionally make all targets.\\n\"),\n N_(\"\\\n -c, --search-parent Search parent directories for Makefile.\\n\"),\n N_(\"\\\n -C DIRECTORY, --directory=DIRECTORY\\n\\\n Change to DIRECTORY before doing anything.\\n\"),\n N_(\"\\\n -d Print lots of debugging information.\\n\"),\n N_(\"\\\n --debug[=FLAGS] Print various types of debugging information.\\n\"),\n N_(\"\\\n -e, --environment-overrides\\n\\\n Environment variables override makefiles.\\n\"),\n N_(\"\\\n -E STRING, --eval=STRING Evaluate STRING as a makefile statement.\\n\"),\n N_(\"\\\n -f FILE, --file=FILE, --makefile=FILE\\n\\\n Read FILE as a makefile.\\n\"),\n N_(\"\\\n -h, --help Print this message and exit.\\n\"),\n N_(\"\\\n -i, --ignore-errors Ignore errors from recipes.\\n\"),\n N_(\"\\\n -I DIRECTORY, --include-dir=DIRECTORY\\n\\\n Search DIRECTORY for included makefiles.\\n\"),\n N_(\"\\\n -j [N], --jobs[=N] Allow N jobs at once; infinite jobs with no arg.\\n\"),\n N_(\"\\\n -k, --keep-going Keep 
going when some targets can't be made.\\n\"),\n N_(\"\\\n -l [N], --load-average[=N], --max-load[=N]\\n\\\n Don't start multiple jobs unless load is below N.\\n\"),\n N_(\"\\\n -L, --check-symlink-times Use the latest mtime between symlinks and target.\\n\"),\n N_(\"\\\n --no-extended-errors Do not give additional error reporting.\\n\"),\n N_(\"\\\n -n, --just-print, --dry-run, --recon\\n\\\n Don't actually run any recipe; just print them.\\n\"),\n N_(\"\\\n -o FILE, --old-file=FILE, --assume-old=FILE\\n\\\n Consider FILE to be very old and don't remake it.\\n\"),\n N_(\"\\\n -O[TYPE], --output-sync[=TYPE]\\n\\\n Synchronize output of parallel jobs by TYPE.\\n\"),\n N_(\"\\\n -p, --print-data-base Print make's internal database.\\n\"),\n N_(\"\\\n -P, --profile[=FORMAT] Print profiling information for each target using FORMAT.\\n\\\n If FORMAT isn't specified, default to \\\"callgrind\\\"\\n\"),\n N_(\"\\\n --profile-directory=DIR Output profiling data to the DIR directory.\\n\"),\n N_(\"\\\n -q, --question Run no recipe; exit status says if up to date.\\n\"),\n N_(\"\\\n -r, --no-builtin-rules Disable the built-in implicit rules.\\n\"),\n N_(\"\\\n -R, --no-builtin-variables Disable the built-in variable settings.\\n\"),\n N_(\"\\\n -s, --silent, --quiet Don't echo recipes.\\n\"),\n N_(\"\\\n --no-silent Echo recipes (disable --silent mode).\\n\"),\n N_(\"\\\n -S, --no-keep-going, --stop\\n\\\n Turns off -k.\\n\"),\n N_(\"\\\n --targets Give list of explicitly-named targets.\\n\"),\n N_(\"\\\n --tasks Give list of targets which have descriptions\\n\\\n associated with them.\\n\"),\n N_(\"\\\n -t, --touch Touch targets instead of remaking them.\\n\"),\n N_(\"\\\n -v, --version Print the version number of make and exit.\\n\"),\n N_(\"\\\n --verbosity=LEVEL Set verbosity level. LEVEL may be \\\"terse\\\" \\\"no-header\\\" or\\n\\\n \\\"full\\\". 
The default is \\\"full\\\".\\n\"),\n N_(\"\\\n -w, --print-directory Print the current directory.\\n\"),\n N_(\"\\\n --no-print-directory Turn off -w, even if it was turned on implicitly.\\n\"),\n N_(\"\\\n -W FILE, --what-if=FILE, --new-file=FILE, --assume-new=FILE\\n\\\n Consider FILE to be infinitely new.\\n\"),\n N_(\"\\\n --warn-undefined-variables Warn when an undefined variable is referenced.\\n\"),\n N_(\"\\\n -x, --trace[=TYPE] Trace command execution TYPE may be\\n\\\n \\\"command\\\", \\\"read\\\", \\\"normal\\\".\\\"\\n\\\n \\\"noshell\\\", or \\\"full\\\". Default is \\\"normal\\\"\\n\"),\n N_(\"\\\n --debugger-stop[=TYPE] Which point to enter debugger. TYPE may be\\n\\\n \\\"goal\\\", \\\"preread\\\", \\\"preaction\\\",\\n\\\n \\\"full\\\", \\\"error\\\", or \\\"fatal\\\".\\n\\\n Only makes sense with -X set.\\n\"),\n N_(\"\\\n -v, --version Print the version number of make and exit.\\n\"),\n N_(\"\\\n -X, --debugger Enter debugger.\\n\"),\n N_(\"\\\n -!, --post-mortem Go into debugger on error.\\n\\\n Same as --debugger --debugger-stop=error\\n\"),\n N_(\"\\\n --no-readline Do not use GNU ReadLine in debugger.\\n\"),\n NULL\n };\n\n/* The table of command switches.\n Order matters here: this is the order MAKEFLAGS will be constructed.\n So be sure all simple flags (single char, no argument) come first. 
*/\n\nstatic const struct command_switch switches[] =\n {\n { 'b', ignore, 0, 0, 0, 0, 0, 0, 0 },\n { 'B', flag, &always_make_set, 1, 1, 0, 0, 0, \"always-make\" },\n { 'c', flag, &search_parent_flag, 1, 1, 0, 0, 0, \"search-parent\" },\n { 'd', flag, &debug_flag, 1, 1, 0, 0, 0, 0 },\n { 'e', flag, &env_overrides, 1, 1, 0, 0, 0, \"environment-overrides\", },\n { 'h', flag, &print_usage_flag, 0, 0, 0, 0, 0, \"help\" },\n { 'i', flag, &ignore_errors_flag, 1, 1, 0, 0, 0, \"ignore-errors\" },\n { 'k', flag, &keep_going_flag, 1, 1, 0, 0, &default_keep_going_flag,\n \"keep-going\" },\n { 'L', flag, &check_symlink_flag, 1, 1, 0, 0, 0, \"check-symlink-times\" },\n { 'm', ignore, 0, 0, 0, 0, 0, 0, 0 },\n { 'n', flag, &just_print_flag, 1, 1, 1, 0, 0, \"just-print\" },\n { 'p', flag, &print_data_base_flag, 1, 1, 0, 0, 0, \"print-data-base\" },\n { 'P', string, &profile_option, 1, 1, 0, \"callgrind\", 0, \"profile\" },\n { 'q', flag, &question_flag, 1, 1, 1, 0, 0, \"question\" },\n { 'r', flag, &no_builtin_rules_flag, 1, 1, 0, 0, 0, \"no-builtin-rules\" },\n { 'R', flag, &no_builtin_variables_flag, 1, 1, 0, 0, 0,\n \"no-builtin-variables\" },\n { 's', flag, &silent_flag, 1, 1, 0, 0, &default_silent_flag, \"silent\" },\n { 'S', flag_off, &keep_going_flag, 1, 1, 0, 0, &default_keep_going_flag,\n \"no-keep-going\" },\n { 't', flag, &touch_flag, 1, 1, 1, 0, 0, \"touch\" },\n { 'v', flag, &print_version_flag, 1, 1, 0, 0, 0, \"version\" },\n { 'w', flag, &print_directory, 1, 1, 0, 0, 0, \"print-directory\" },\n { 'X', flag, &debugger_flag, 1, 1, 0, 0, 0, \"debugger\" },\n { '!', flag, &post_mortem_flag, 1, 1, 0, 0, 0, \"post-mortem\" },\n\n /* These options take arguments. 
*/\n { 'C', filename, &directories, 0, 0, 0, 0, 0, \"directory\" },\n { 'E', strlist, &eval_strings, 1, 0, 0, 0, 0, \"eval\" },\n { 'f', filename, &makefiles, 0, 0, 0, 0, 0, \"file\" },\n { 'I', filename, &include_directories, 1, 1, 0, 0, 0,\n \"include-dir\" },\n { 'j', positive_int, &arg_job_slots, 1, 1, 0, &inf_jobs, &default_job_slots,\n \"jobs\" },\n { 'l', floating, &max_load_average, 1, 1, 0, &default_load_average,\n &default_load_average, \"load-average\" },\n { 'o', filename, &old_files, 0, 0, 0, 0, 0, \"old-file\" },\n { 'O', string, &output_sync_option, 1, 1, 0, \"target\", 0, \"output-sync\" },\n { 'W', filename, &new_files, 0, 0, 0, 0, 0, \"what-if\" },\n { 'x', strlist, &tracing_opts, 1, 1, 0, \"normal\", 0, \"trace\" },\n\n /* These are long-style options. */\n { CHAR_MAX+1, strlist, &db_flags, 1, 1, 0, \"basic\", 0, \"debug\" },\n { CHAR_MAX+2, string, &jobserver_auth, 1, 1, 0, 0, 0, \"jobserver-auth\" },\n { CHAR_MAX+3, flag, &show_tasks_flag, 0, 0, 0, 0, 0, \"tasks\" },\n { CHAR_MAX+4, flag, &inhibit_print_directory, 1, 1, 0, 0, 0,\n \"no-print-directory\" },\n { CHAR_MAX+5, flag, &warn_undefined_variables_flag, 1, 1, 0, 0, 0,\n \"warn-undefined-variables\" },\n { CHAR_MAX+7, string, &sync_mutex, 1, 1, 0, 0, 0, \"sync-mutex\" },\n { CHAR_MAX+8, flag_off, &silent_flag, 1, 1, 0, 0, &default_silent_flag, \"no-silent\" },\n { CHAR_MAX+9, string, &jobserver_auth, 1, 0, 0, 0, 0, \"jobserver-fds\" },\n { CHAR_MAX+10, strlist, &verbosity_opts, 1, 1, 0, 0, 0,\n \"verbosity\" },\n { CHAR_MAX+11, flag, (char *) &no_extended_errors, 1, 1, 0, 0, 0,\n \"no-extended-errors\", },\n { CHAR_MAX+12, flag_off, (char *) &use_readline_flag, 1, 0, 0, 0, 0,\n \"no-readline\", },\n { CHAR_MAX+13, flag, &show_targets_flag, 0, 0, 0, 0, 0,\n \"targets\" },\n { CHAR_MAX+14, strlist, &debugger_opts, 1, 1, 0, \"preaction\", 0,\n \"debugger-stop\" },\n { CHAR_MAX+15, filename, &profile_dir_opt, 1, 1, 0, 0, 0, \"profile-directory\" },\n { 0, 0, 0, 0, 0, 0, 0, 0, 0 }\n };\n\n/* 
Secondary long names for options. */\n\nstatic struct option long_option_aliases[] =\n {\n { \"quiet\", no_argument, 0, 's' },\n { \"stop\", no_argument, 0, 'S' },\n { \"new-file\", required_argument, 0, 'W' },\n { \"assume-new\", required_argument, 0, 'W' },\n { \"assume-old\", required_argument, 0, 'o' },\n { \"max-load\", optional_argument, 0, 'l' },\n { \"dry-run\", no_argument, 0, 'n' },\n { \"recon\", no_argument, 0, 'n' },\n { \"makefile\", required_argument, 0, 'f' },\n };\n\n/* List of goal targets. */\n\nstatic struct goaldep *goals, *lastgoal;\n\n/* List of variables which were defined on the command line\n (or, equivalently, in MAKEFLAGS). */\n\nstruct command_variable\n {\n struct command_variable *next;\n struct variable *variable;\n };\nstatic struct command_variable *command_variables;\n\n/*! Value of argv[0] which seems to get modified. Can we merge this with\n program below? */\nchar *argv0 = NULL;\n\n/*! The name we were invoked with. */\n\n/*! Our initial arguments -- used for debugger restart execvp. */\nconst char * const*global_argv;\n\n/*! Our current directory before processing any -C options. */\nchar *directory_before_chdir = NULL;\n\n/*! Pointer to the value of the .DEFAULT_GOAL special variable.\n The value will be the name of the goal to remake if the command line\n does not override it. It can be set by the makefile, or else it's\n the first target defined in the makefile whose name does not start\n with '.'. */\nstruct variable * default_goal_var;\n\n/*! Pointer to structure for the file .DEFAULT\n whose commands are used for any file that has none of its own.\n This is zero if the makefiles do not define .DEFAULT. */\nstruct file *default_file;\n\n/* Nonzero if we have seen the '.SECONDEXPANSION' target.\n This turns on secondary expansion of prerequisites. 
*/\n\nint second_expansion;\n\n/* Nonzero if we have seen the '.ONESHELL' target.\n This causes the entire recipe to be handed to SHELL\n as a single string, potentially containing newlines. */\n\nint one_shell;\n\n/* Nonzero if we have seen the '.NOTPARALLEL' target.\n This turns off parallel builds for this invocation of make. */\n\nint not_parallel;\n\n/* Nonzero if some rule detected clock skew; we keep track so (a) we only\n print one warning about it during the run, and (b) we can print a final\n warning at the end of the run. */\n\nint clock_skew_detected;\n\n/* If output-sync is enabled we'll collect all the output generated due to\n options, while reading makefiles, etc. */\n\nstruct output make_sync;\n\n\f\n/* Mask of signals that are being caught with fatal_error_signal. */\n\n#if defined(POSIX)\nsigset_t fatal_signal_set;\n#elif defined(HAVE_SIGSETMASK)\nint fatal_signal_mask;\n#endif\n\n#if !HAVE_DECL_BSD_SIGNAL && !defined bsd_signal\n# if !defined HAVE_SIGACTION\n# define bsd_signal signal\n# else\ntypedef RETSIGTYPE (*bsd_signal_ret_t) (int);\n\nstatic bsd_signal_ret_t\nbsd_signal (int sig, bsd_signal_ret_t func)\n{\n struct sigaction act, oact;\n act.sa_handler = func;\n act.sa_flags = SA_RESTART;\n sigemptyset (&act.sa_mask);\n sigaddset (&act.sa_mask, sig);\n if (sigaction (sig, &act, &oact) != 0)\n return SIG_ERR;\n return oact.sa_handler;\n}\n# endif\n#endif\n\nvoid\ndecode_trace_flags (stringlist_t *ppsz_tracing_opts)\n{\n if (ppsz_tracing_opts) {\n const char **p;\n db_level |= (DB_TRACE | DB_SHELL);\n if (!ppsz_tracing_opts->list)\n db_level |= (DB_BASIC);\n else\n for (p = ppsz_tracing_opts->list; *p != 0; ++p) {\n if (0 == strcmp(*p, \"command\"))\n ;\n else if (0 == strcmp(*p, \"full\"))\n db_level |= (DB_VERBOSE|DB_READ_MAKEFILES);\n else if (0 == strcmp(*p, \"normal\"))\n db_level |= DB_BASIC;\n else if (0 == strcmp(*p, \"noshell\"))\n db_level = DB_BASIC | DB_TRACE;\n else if (0 == strcmp(*p, \"read\"))\n db_level |= 
DB_READ_MAKEFILES;\n else\n OS ( fatal, NILF, _(\"unknown trace command execution type `%s'\"), *p);\n }\n }\n}\n\nvoid\ndecode_verbosity_flags (stringlist_t *ppsz_verbosity_opts)\n{\n if (ppsz_verbosity_opts) {\n const char **p;\n if (ppsz_verbosity_opts->list)\n for (p = ppsz_verbosity_opts->list; *p != 0; ++p) {\n if (0 == strcmp(*p, \"no-header\"))\n b_show_version = false;\n else if (0 == strcmp(*p, \"full\")) {\n db_level |= (DB_VERBOSE);\n\t b_show_version = true;\n\t} else if (0 == strcmp(*p, \"terse\")) {\n\t db_level &= (~DB_VERBOSE);\n\t b_show_version = false;\n\t}\n }\n }\n}\n\nstatic void\ninitialize_global_hash_tables (void)\n{\n init_hash_global_variable_set ();\n strcache_init ();\n init_hash_files ();\n hash_init_directories ();\n hash_init_function_table ();\n}\n\n/* This character map locate stop chars when parsing GNU makefiles.\n Each element is true if we should stop parsing on that character. */\n\nstatic const char *\nexpand_command_line_file (const char *name)\n{\n const char *cp;\n char *expanded = 0;\n\n if (name[0] == '\\0')\n O (fatal, NILF, _(\"empty string invalid as file name\"));\n\n if (name[0] == '~')\n {\n expanded = remake_tilde_expand (name);\n if (expanded && expanded[0] != '\\0')\n name = expanded;\n }\n\n /* This is also done in parse_file_seq, so this is redundant\n for names read from makefiles. It is here for names passed\n on the command line. */\n while (name[0] == '.' && name[1] == '/')\n {\n name += 2;\n while (name[0] == '/')\n /* Skip following slashes: \".//foo\" is \"foo\", not \"/foo\". */\n ++name;\n }\n\n if (name[0] == '\\0')\n {\n /* Nothing else but one or more \"./\", maybe plus slashes! */\n name = \"./\";\n }\n\n cp = strcache_add (name);\n\n free (expanded);\n\n return cp;\n}\n\n/* Toggle -d on receipt of SIGUSR1. */\n\n#ifdef SIGUSR1\nstatic RETSIGTYPE\ndebug_signal_handler (int sig UNUSED)\n{\n db_level = db_level ? 
DB_NONE : DB_BASIC;\n}\n#endif\n\nstatic void\ndecode_debug_flags (void)\n{\n const char **pp;\n\n if (debug_flag)\n db_level = DB_ALL;\n\n if (db_flags)\n for (pp=db_flags->list; *pp; ++pp)\n {\n const char *p = *pp;\n\n while (1)\n {\n switch (tolower (p[0]))\n {\n case 'a':\n db_level |= DB_ALL;\n break;\n case 'b':\n db_level |= DB_BASIC;\n break;\n case 'i':\n db_level |= DB_BASIC | DB_IMPLICIT;\n break;\n case 'j':\n db_level |= DB_JOBS;\n break;\n case 'm':\n db_level |= DB_BASIC | DB_MAKEFILES;\n break;\n case 'n':\n db_level = 0;\n break;\n case 'v':\n db_level |= DB_BASIC | DB_VERBOSE;\n break;\n default:\n OS (fatal, NILF,\n _(\"unknown debug level specification '%s'\"), p);\n }\n\n while (*(++p) != '\\0')\n if (*p == ',' || *p == ' ')\n {\n ++p;\n break;\n }\n\n if (*p == '\\0')\n break;\n }\n }\n\n if (db_level)\n verify_flag = 1;\n\n if (! db_level)\n debug_flag = 0;\n}\n\nstatic void\ndecode_output_sync_flags (void)\n{\n#ifdef NO_OUTPUT_SYNC\n output_sync = OUTPUT_SYNC_NONE;\n#else\n if (output_sync_option)\n {\n if (streq (output_sync_option, \"none\"))\n output_sync = OUTPUT_SYNC_NONE;\n else if (streq (output_sync_option, \"line\"))\n output_sync = OUTPUT_SYNC_LINE;\n else if (streq (output_sync_option, \"target\"))\n output_sync = OUTPUT_SYNC_TARGET;\n else if (streq (output_sync_option, \"recurse\"))\n output_sync = OUTPUT_SYNC_RECURSE;\n else\n OS (fatal, NILF,\n _(\"unknown output-sync type '%s'\"), output_sync_option);\n }\n\n if (sync_mutex)\n RECORD_SYNC_MUTEX (sync_mutex);\n#endif\n}\n\nvoid\ndecode_profile_options(void)\n{\n if (profile_option)\n {\n if (streq (profile_option, \"callgrind\"))\n profile_flag = PROFILE_CALLGRIND;\n else if (streq (profile_option, \"json\"))\n profile_flag = PROFILE_JSON;\n else\n profile_flag = PROFILE_DISABLED;\n }\n else\n {\n profile_flag = PROFILE_DISABLED;\n }\n\n if (profile_dir_opt == NULL)\n {\n profile_directory = starting_directory;\n }\n else\n {\n const char *dir = 
profile_dir_opt->list[profile_dir_opt->idx - 1];\n if (dir[0] != '/') {\n char directory[GET_PATH_MAX];\n sprintf(directory, \"%s/%s\", starting_directory, dir);\n profile_dir_opt->list[profile_dir_opt->idx - 1] = strcache_add(directory);\n }\n profile_directory = profile_dir_opt->list[profile_dir_opt->idx - 1];\n }\n}\n\n#ifdef WINDOWS32\n\n#ifndef NO_OUTPUT_SYNC\n\n/* This is called from start_job_command when it detects that\n output_sync option is in effect. The handle to the synchronization\n mutex is passed, as a string, to sub-makes via the --sync-mutex\n command-line argument. */\nvoid\nprepare_mutex_handle_string (sync_handle_t handle)\n{\n if (!sync_mutex)\n {\n /* Prepare the mutex handle string for our children. */\n /* 2 hex digits per byte + 2 characters for \"0x\" + null. */\n sync_mutex = xmalloc ((2 * sizeof (sync_handle_t)) + 2 + 1);\n sprintf (sync_mutex, \"0x%Ix\", handle);\n define_makeflags (1, 0);\n }\n}\n\n#endif /* NO_OUTPUT_SYNC */\n\n/*\n * HANDLE runtime exceptions by avoiding a requestor on the GUI. Capture\n * exception and print it to stderr instead.\n *\n * If ! DB_VERBOSE, just print a simple message and exit.\n * If DB_VERBOSE, print a more verbose message.\n * If compiled for DEBUG, let exception pass through to GUI so that\n * debuggers can attach.\n */\nLONG WINAPI\nhandle_runtime_exceptions (struct _EXCEPTION_POINTERS *exinfo)\n{\n PEXCEPTION_RECORD exrec = exinfo->ExceptionRecord;\n LPSTR cmdline = GetCommandLine ();\n LPSTR prg = strtok (cmdline, \" \");\n CHAR errmsg[1024];\n#ifdef USE_EVENT_LOG\n HANDLE hEventSource;\n LPTSTR lpszStrings[1];\n#endif\n\n if (! 
ISDB (DB_VERBOSE))\n {\n sprintf (errmsg,\n _(\"%s: Interrupt/Exception caught (code = 0x%lx, addr = 0x%p)\\n\"),\n prg, exrec->ExceptionCode, exrec->ExceptionAddress);\n fprintf (stderr, errmsg);\n exit (255);\n }\n\n sprintf (errmsg,\n _(\"\\nUnhandled exception filter called from program %s\\nExceptionCode = %lx\\nExceptionFlags = %lx\\nExceptionAddress = 0x%p\\n\"),\n prg, exrec->ExceptionCode, exrec->ExceptionFlags,\n exrec->ExceptionAddress);\n\n if (exrec->ExceptionCode == EXCEPTION_ACCESS_VIOLATION\n && exrec->NumberParameters >= 2)\n sprintf (&errmsg[strlen(errmsg)],\n (exrec->ExceptionInformation[0]\n ? _(\"Access violation: write operation at address 0x%p\\n\")\n : _(\"Access violation: read operation at address 0x%p\\n\")),\n (PVOID)exrec->ExceptionInformation[1]);\n\n /* turn this on if we want to put stuff in the event log too */\n#ifdef USE_EVENT_LOG\n hEventSource = RegisterEventSource (NULL, \"GNU Make\");\n lpszStrings[0] = errmsg;\n\n if (hEventSource != NULL)\n {\n ReportEvent (hEventSource, /* handle of event source */\n EVENTLOG_ERROR_TYPE, /* event type */\n 0, /* event category */\n 0, /* event ID */\n NULL, /* current user's SID */\n 1, /* strings in lpszStrings */\n 0, /* no bytes of raw data */\n lpszStrings, /* array of error strings */\n NULL); /* no raw data */\n\n (VOID) DeregisterEventSource (hEventSource);\n }\n#endif\n\n /* Write the error to stderr too */\n fprintf (stderr, errmsg);\n\n#ifdef DEBUG\n return EXCEPTION_CONTINUE_SEARCH;\n#else\n exit (255);\n return (255); /* not reached */\n#endif\n}\n\n/*\n * On WIN32 systems we don't have the luxury of a /bin directory that\n * is mapped globally to every drive mounted to the system. Since make could\n * be invoked from any drive, and we don't want to propagate /bin/sh\n * to every single drive. 
Allow ourselves a chance to search for\n * a value for default shell here (if the default path does not exist).\n */\n\nint\nfind_and_set_default_shell (const char *token)\n{\n int sh_found = 0;\n char *atoken = 0;\n const char *search_token;\n const char *tokend;\n PATH_VAR(sh_path);\n extern const char *default_shell;\n\n if (!token)\n search_token = default_shell;\n else\n search_token = atoken = xstrdup (token);\n\n /* If the user explicitly requests the DOS cmd shell, obey that request.\n However, make sure that's what they really want by requiring the value\n of SHELL either equal, or have a final path element of, \"cmd\" or\n \"cmd.exe\" case-insensitive. */\n tokend = search_token + strlen (search_token) - 3;\n if (((tokend == search_token\n || (tokend > search_token\n && (tokend[-1] == '/' || tokend[-1] == '\\\\')))\n && !strcasecmp (tokend, \"cmd\"))\n || ((tokend - 4 == search_token\n || (tokend - 4 > search_token\n && (tokend[-5] == '/' || tokend[-5] == '\\\\')))\n && !strcasecmp (tokend - 4, \"cmd.exe\")))\n {\n batch_mode_shell = 1;\n unixy_shell = 0;\n sprintf (sh_path, \"%s\", search_token);\n default_shell = xstrdup (w32ify (sh_path, 0));\n DB (DB_VERBOSE, (_(\"find_and_set_shell() setting default_shell = %s\\n\"),\n default_shell));\n sh_found = 1;\n }\n else if (!no_default_sh_exe\n && (token == NULL || !strcmp (search_token, default_shell)))\n {\n /* no new information, path already set or known */\n sh_found = 1;\n }\n else if (_access (search_token, 0) == 0)\n {\n /* search token path was found */\n sprintf (sh_path, \"%s\", search_token);\n default_shell = xstrdup (w32ify (sh_path, 0));\n DB (DB_VERBOSE, (_(\"find_and_set_shell() setting default_shell = %s\\n\"),\n default_shell));\n sh_found = 1;\n }\n else\n {\n char *p;\n struct variable *v = lookup_variable (STRING_SIZE_TUPLE (\"PATH\"));\n\n /* Search Path for shell */\n if (v && v->value)\n {\n char *ep;\n\n p = v->value;\n ep = strchr (p, PATH_SEPARATOR_CHAR);\n\n while (ep && *ep)\n 
{\n *ep = '\\0';\n\n sprintf (sh_path, \"%s/%s\", p, search_token);\n if (_access (sh_path, 0) == 0)\n {\n default_shell = xstrdup (w32ify (sh_path, 0));\n sh_found = 1;\n *ep = PATH_SEPARATOR_CHAR;\n\n /* terminate loop */\n p += strlen (p);\n }\n else\n {\n *ep = PATH_SEPARATOR_CHAR;\n p = ++ep;\n }\n\n ep = strchr (p, PATH_SEPARATOR_CHAR);\n }\n\n /* be sure to check last element of Path */\n if (p && *p)\n {\n sprintf (sh_path, \"%s/%s\", p, search_token);\n if (_access (sh_path, 0) == 0)\n {\n default_shell = xstrdup (w32ify (sh_path, 0));\n sh_found = 1;\n }\n }\n\n if (sh_found)\n DB (DB_VERBOSE,\n (_(\"find_and_set_shell() path search set default_shell = %s\\n\"),\n default_shell));\n }\n }\n\n /* naive test */\n if (!unixy_shell && sh_found\n && (strstr (default_shell, \"sh\") || strstr (default_shell, \"SH\")))\n {\n unixy_shell = 1;\n batch_mode_shell = 0;\n }\n\n#ifdef BATCH_MODE_ONLY_SHELL\n batch_mode_shell = 1;\n#endif\n\n free (atoken);\n\n return (sh_found);\n}\n#endif /* WINDOWS32 */\n\n#ifdef __MSDOS__\nstatic void\nmsdos_return_to_initial_directory (void)\n{\n if (directory_before_chdir)\n chdir (directory_before_chdir);\n}\n#endif /* __MSDOS__ */\n\nstatic void\nreset_jobserver (void)\n{\n jobserver_clear ();\n free (jobserver_auth);\n jobserver_auth = NULL;\n}\n\nint\nmain (int argc, const char **argv, char **envp)\n{\n static char *stdin_nm = 0;\n int makefile_status = MAKE_SUCCESS;\n PATH_VAR (current_directory);\n unsigned int restarts = 0;\n unsigned int syncing = 0;\n int argv_slots;\n#ifdef WINDOWS32\n const char *unix_path = NULL;\n const char *windows32_path = NULL;\n\n SetUnhandledExceptionFilter (handle_runtime_exceptions);\n\n /* start off assuming we have no shell */\n unixy_shell = 0;\n no_default_sh_exe = 1;\n#endif\n\n /* Useful for attaching debuggers, etc. 
*/\n#ifdef SPIN\n SPIN (\"main-entry\");\n#endif\n\n argv0 = strdup(argv[0]);\n output_init (&make_sync);\n\n initialize_stopchar_map();\n\n#ifdef SET_STACK_SIZE\n /* Get rid of any avoidable limit on stack size. */\n {\n struct rlimit rlim;\n\n /* Set the stack limit huge so that alloca does not fail. */\n if (getrlimit (RLIMIT_STACK, &rlim) == 0\n && rlim.rlim_cur > 0 && rlim.rlim_cur < rlim.rlim_max)\n {\n stack_limit = rlim;\n rlim.rlim_cur = rlim.rlim_max;\n setrlimit (RLIMIT_STACK, &rlim);\n }\n else\n stack_limit.rlim_cur = 0;\n }\n#endif\n\n global_argv = argv;\n /* Needed for OS/2 */\n initialize_main (&argc, &argv);\n\n#ifdef MAKE_MAINTAINER_MODE\n /* In maintainer mode we always enable verification. */\n verify_flag = 1;\n#endif\n\n#if defined (__MSDOS__) && !defined (_POSIX_SOURCE)\n /* Request the most powerful version of 'system', to\n make up for the dumb default shell. */\n __system_flags = (__system_redirect\n | __system_use_shell\n | __system_allow_multiple_cmds\n | __system_allow_long_cmds\n | __system_handle_null_commands\n | __system_emulate_chdir);\n\n#endif\n\n /* Set up gettext/internationalization support. */\n setlocale (LC_ALL, \"\");\n /* The cast to void shuts up compiler warnings on systems that\n disable NLS. */\n (void)bindtextdomain (PACKAGE, LOCALEDIR);\n (void)textdomain (PACKAGE);\n\n#ifdef POSIX\n sigemptyset (&fatal_signal_set);\n#define ADD_SIG(sig) sigaddset (&fatal_signal_set, sig)\n#else\n#ifdef HAVE_SIGSETMASK\n fatal_signal_mask = 0;\n#define ADD_SIG(sig) fatal_signal_mask |= sigmask (sig)\n#else\n#define ADD_SIG(sig) (void)sig\n#endif\n#endif\n\n#define FATAL_SIG(sig) \\\n if (bsd_signal (sig, fatal_error_signal) == SIG_IGN) \\\n bsd_signal (sig, SIG_IGN); \\\n else \\\n ADD_SIG (sig);\n\n#ifdef SIGHUP\n FATAL_SIG (SIGHUP);\n#endif\n#ifdef SIGQUIT\n FATAL_SIG (SIGQUIT);\n#endif\n FATAL_SIG (SIGINT);\n FATAL_SIG (SIGTERM);\n\n#ifdef __MSDOS__\n /* Windows 9X delivers FP exceptions in child programs to their\n parent! 
We don't want Make to die when a child divides by zero,\n so we work around that lossage by catching SIGFPE. */\n FATAL_SIG (SIGFPE);\n#endif\n\n#ifdef SIGDANGER\n FATAL_SIG (SIGDANGER);\n#endif\n#ifdef SIGXCPU\n FATAL_SIG (SIGXCPU);\n#endif\n#ifdef SIGXFSZ\n FATAL_SIG (SIGXFSZ);\n#endif\n\n#undef FATAL_SIG\n\n /* Do not ignore the child-death signal. This must be done before\n any children could possibly be created; otherwise, the wait\n functions won't work on systems with the SVR4 ECHILD brain\n damage, if our invoker is ignoring this signal. */\n\n#ifdef HAVE_WAIT_NOHANG\n# if defined SIGCHLD\n (void) bsd_signal (SIGCHLD, SIG_DFL);\n# endif\n# if defined SIGCLD && SIGCLD != SIGCHLD\n (void) bsd_signal (SIGCLD, SIG_DFL);\n# endif\n#endif\n\n output_init (NULL);\n\n /* Figure out where this program lives. */\n\n if (argv[0] == 0)\n argv[0] = (char *)\"\";\n if (argv[0][0] == '\\0')\n program = \"make\";\n else\n {\n#if defined(HAVE_DOS_PATHS)\n const char* start = argv[0];\n\n /* Skip an initial drive specifier if present. */\n if (isalpha ((unsigned char)start[0]) && start[1] == ':')\n start += 2;\n\n if (start[0] == '\\0')\n program = \"make\";\n else\n {\n program = start + strlen (start);\n while (program > start && ! STOP_SET (program[-1], MAP_DIRSEP))\n --program;\n\n /* Remove the .exe extension if present. */\n {\n size_t len = strlen (program);\n if (len > 4 && streq (&program[len - 4], \".exe\"))\n program = xstrndup (program, len - 4);\n }\n }\n#else\n program = strrchr (argv[0], '/');\n if (program == 0)\n program = argv[0];\n else\n ++program;\n#endif\n }\n\n /* Set up to access user data (files). */\n user_access ();\n\n initialize_global_hash_tables ();\n\n /* Figure out where we are. 
*/\n\n#ifdef WINDOWS32\n if (getcwd_fs (current_directory, GET_PATH_MAX) == 0)\n#else\n if (getcwd (current_directory, GET_PATH_MAX) == 0)\n#endif\n {\n#ifdef HAVE_GETCWD\n perror_with_name (\"getcwd\", \"\");\n#else\n OS (error, NILF, \"getwd: %s\", current_directory);\n#endif\n current_directory[0] = '\\0';\n directory_before_chdir = 0;\n }\n else\n directory_before_chdir = xstrdup (current_directory);\n\n#ifdef __MSDOS__\n /* Make sure we will return to the initial directory, come what may. */\n atexit (msdos_return_to_initial_directory);\n#endif\n\n /* Initialize the special variables. */\n define_variable_cname (\".VARIABLES\", \"\", o_default, 0)->special = 1;\n /* define_variable_cname (\".TARGETS\", \"\", o_default, 0)->special = 1; */\n define_variable_cname (\".RECIPEPREFIX\", \"\", o_default, 0)->special = 1;\n define_variable_cname (\".SHELLFLAGS\", \"-c\", o_default, 0);\n define_variable_cname (\".LOADED\", \"\", o_default, 0);\n\n /* Set up .FEATURES\n Use a separate variable because define_variable_cname() is a macro and\n some compilers (MSVC) don't like conditionals in macros. */\n {\n const char *features = \"target-specific order-only second-expansion\"\n \" else-if shortest-stem undefine oneshell nocomment\"\n \" grouped-target extra-prereqs\"\n#ifndef NO_ARCHIVES\n \" archives\"\n#endif\n#ifdef MAKE_JOBSERVER\n \" jobserver\"\n#endif\n#ifndef NO_OUTPUT_SYNC\n \" output-sync\"\n#endif\n#ifdef MAKE_SYMLINKS\n \" check-symlink\"\n#endif\n#ifdef HAVE_GUILE\n \" guile\"\n#endif\n#ifdef MAKE_LOAD\n \" load\"\n#endif\n#ifdef MAKE_MAINTAINER_MODE\n \" maintainer\"\n#endif\n ;\n\n define_variable_cname (\".FEATURES\", features, o_default, 0);\n }\n\n /* Configure GNU Guile support */\n guile_gmake_setup (NILF);\n\n /* Read in variables from the environment. It is important that this be\n done before $(MAKE) is figured out so its definitions will not be\n from the environment. 
*/\n\n {\n unsigned int i;\n\n for (i = 0; envp[i] != 0; ++i)\n {\n struct variable *v;\n const char *ep = envp[i];\n /* By default, export all variables culled from the environment. */\n enum variable_export export = v_export;\n size_t len;\n\n while (! STOP_SET (*ep, MAP_EQUALS))\n ++ep;\n\n /* If there's no equals sign it's a malformed environment. Ignore. */\n if (*ep == '\\0')\n continue;\n\n /* Length of the variable name, and skip the '='. */\n len = ep++ - envp[i];\n\n /* If this is MAKE_RESTARTS, check to see if the \"already printed\n the enter statement\" flag is set. */\n if (len == 13 && strneq (envp[i], \"MAKE_RESTARTS\", 13))\n {\n if (*ep == '-')\n {\n OUTPUT_TRACED ();\n ++ep;\n }\n restarts = (unsigned int) atoi (ep);\n export = v_noexport;\n }\n\n v = define_variable (envp[i], len, ep, o_env, 1);\n\n /* POSIX says the value of SHELL set in the makefile won't change the\n value of SHELL given to subprocesses. */\n if (streq (v->name, \"SHELL\"))\n {\n export = v_noexport;\n shell_var.name = xstrdup (\"SHELL\");\n shell_var.length = 5;\n shell_var.value = xstrdup (ep);\n }\n\n v->export = export;\n }\n }\n\n /* Decode the switches. */\n decode_env_switches (STRING_SIZE_TUPLE (\"GNUMAKEFLAGS\"));\n\n /* Clear GNUMAKEFLAGS to avoid duplication. */\n define_variable_cname (\"GNUMAKEFLAGS\", \"\", o_env, 0);\n\n decode_env_switches (STRING_SIZE_TUPLE (\"MAKEFLAGS\"));\n\n#if 0\n /* People write things like:\n MFLAGS=\"CC=gcc -pipe\" \"CFLAGS=-g\"\n and we set the -p, -i and -e switches. Doesn't seem quite right. */\n decode_env_switches (STRING_SIZE_TUPLE (\"MFLAGS\"));\n#endif\n\n /* In output sync mode we need to sync any output generated by reading the\n makefiles, such as in $(info ...) or stderr from $(shell ...) etc. */\n\n syncing = make_sync.syncout = (output_sync == OUTPUT_SYNC_LINE\n || output_sync == OUTPUT_SYNC_TARGET);\n OUTPUT_SET (&make_sync);\n\n /* Parse the command line options. Remember the job slots set this way. 
*/\n {\n int env_slots = arg_job_slots;\n arg_job_slots = INVALID_JOB_SLOTS;\n\n decode_switches (argc, (const char **)argv, 0);\n argv_slots = arg_job_slots;\n\n if (arg_job_slots == INVALID_JOB_SLOTS)\n arg_job_slots = env_slots;\n }\n\n /* Set a variable specifying whether stdout/stdin is hooked to a TTY. */\n#ifdef HAVE_ISATTY\n if (isatty (fileno (stdout)))\n if (! lookup_variable (STRING_SIZE_TUPLE (\"MAKE_TERMOUT\")))\n {\n const char *tty = TTYNAME (fileno (stdout));\n define_variable_cname (\"MAKE_TERMOUT\", tty ? tty : DEFAULT_TTYNAME,\n o_default, 0)->export = v_export;\n }\n if (isatty (fileno (stderr)))\n if (! lookup_variable (STRING_SIZE_TUPLE (\"MAKE_TERMERR\")))\n {\n const char *tty = TTYNAME (fileno (stderr));\n define_variable_cname (\"MAKE_TERMERR\", tty ? tty : DEFAULT_TTYNAME,\n o_default, 0)->export = v_export;\n }\n#endif\n\n /* Reset in case the switches changed our minds. */\n syncing = (output_sync == OUTPUT_SYNC_LINE\n || output_sync == OUTPUT_SYNC_TARGET);\n\n if (make_sync.syncout && ! syncing)\n output_close (&make_sync);\n\n make_sync.syncout = syncing;\n OUTPUT_SET (&make_sync);\n\n /* Figure out the level of recursion. 
*/\n {\n struct variable *v = lookup_variable (STRING_SIZE_TUPLE (MAKELEVEL_NAME));\n if (v && v->value[0] != '\\0' && v->value[0] != '-')\n makelevel = (unsigned int) atoi (v->value);\n else\n makelevel = 0;\n\n v = lookup_variable (STRING_SIZE_TUPLE (MAKEPARENT_PID_NAME));\n if (v && v->value[0] != '\\0' && v->value[0] != '-')\n makeparent_pid = (pid_t) atoi (v->value);\n else\n makeparent_pid = (pid_t)0;\n\n v = lookup_variable (STRING_SIZE_TUPLE (MAKEPARENT_TARGET_NAME));\n if (v && v->value[0] != '\\0' && v->value[0] != '-') {\n makeparent_target = v->value;\n } else {\n makeparent_target = NULL;\n }\n }\n\n decode_trace_flags (tracing_opts);\n decode_verbosity_flags (verbosity_opts);\n\n /* FIXME: put into a subroutine like decode_trace_flags */\n if (post_mortem_flag) {\n debugger_on_error |= (DEBUGGER_ON_ERROR|DEBUGGER_ON_FATAL);\n debugger_enabled = 1;\n } else if (debugger_flag) {\n b_debugger_preread = false;\n job_slots = 1;\n i_debugger_stepping = 1;\n i_debugger_nexting = 0;\n debugger_enabled = 1;\n /* For now we'll do basic debugging. Later, \"stepping'\n will stop here while next won't - either way no printing.\n */\n db_level |= DB_BASIC | DB_CALL | DB_SHELL | DB_UPDATE_GOAL\n | DB_MAKEFILES;\n } else {\n /* debugging sets some things */\n if (debugger_opts) {\n const char **p;\n b_show_version = true;\n for (p = debugger_opts->list; *p != 0; ++p)\n {\n if (0 == strcmp(*p, \"preread\")) {\n b_debugger_preread = true;\n db_level |= DB_READ_MAKEFILES;\n }\n\n if (0 == strcmp(*p, \"goal\")) {\n b_debugger_goal = true;\n db_level |= DB_UPDATE_GOAL;\n }\n\n if ( 0 == strcmp(*p, \"full\") || b_debugger_preread || b_debugger_goal\n || 0 == strcmp(*p, \"preaction\") ) {\n job_slots = 1;\n i_debugger_stepping = 1;\n i_debugger_nexting = 0;\n debugger_enabled = 1;\n /* For now we'll do basic debugging. Later, \"stepping'\n will stop here while next won't - either way no printing.\n */\n db_level |= DB_BASIC | DB_CALL | DB_UPDATE_GOAL\n | b_debugger_goal ? 
0 : DB_SHELL\n | DB_MAKEFILES;\n }\n if ( 0 == strcmp(*p, \"full\") || b_debugger_goal\n || 0 == strcmp(*p, \"error\") ) {\n debugger_on_error |= (DEBUGGER_ON_ERROR|DEBUGGER_ON_FATAL);\n } else if ( 0 == strcmp(*p, \"fatal\") ) {\n debugger_on_error |= DEBUGGER_ON_FATAL;\n }\n }\n#ifndef HAVE_LIBREADLINE\n O (error, NILF,\n \"warning: you specified a debugger option, but you don't have readline support\");\n O (error, NILF,\n \"debugger support compiled in. Debugger options will be ignored.\");\n#endif\n }\n }\n\n /* Set always_make_flag if -B was given and we've not restarted already. */\n always_make_flag = always_make_set && (restarts == 0);\n\n /* Print version information, and exit. */\n if (print_version_flag)\n {\n print_version ();\n die (MAKE_SUCCESS);\n }\n\n if (ISDB (DB_BASIC) && makelevel == 0 && b_show_version)\n print_version ();\n\n /* Set the \"MAKE_COMMAND\" variable to the name we were invoked with.\n (If it is a relative pathname with a slash, prepend our directory name\n so the result will run the same program regardless of the current dir.\n If it is a name with no slash, we can only hope that PATH did not\n find it in the current directory.) */\n if (current_directory[0] != '\\0'\n && argv[0] != 0 && argv[0][0] != '/' && strchr (argv[0], '/') != 0\n )\n argv[0] = xstrdup (concat (3, current_directory, \"/\", argv[0]));\n\n /* We may move, but until we do, here we are. */\n starting_directory = current_directory;\n\n /* Update profile global options from cli options */\n decode_profile_options();\n if (profile_flag) profile_init(PACKAGE_TARNAME \" \" PACKAGE_VERSION, argv, arg_job_slots);\n\n /* Validate the arg_job_slots configuration before we define MAKEFLAGS so\n users get an accurate value in their makefiles.\n At this point arg_job_slots is the argv setting, if there is one, else\n the MAKEFLAGS env setting, if there is one. */\n\n if (jobserver_auth)\n {\n /* We're a child in an existing jobserver group. 
*/\n if (argv_slots == INVALID_JOB_SLOTS)\n {\n /* There's no -j option on the command line: check authorization. */\n if (jobserver_parse_auth (jobserver_auth))\n {\n /* Success! Use the jobserver. */\n goto job_setup_complete;\n }\n\n /* Oops: we have jobserver-auth but it's invalid :(. */\n O (error, NILF, _(\"warning: jobserver unavailable: using -j1. Add '+' to parent make rule.\"));\n arg_job_slots = 1;\n }\n\n /* The user provided a -j setting on the command line so use it: we're\n the master make of a new jobserver group. */\n else if (!restarts)\n ON (error, NILF,\n _(\"warning: -j%d forced in submake: resetting jobserver mode.\"),\n argv_slots);\n\n /* We can't use our parent's jobserver, so reset. */\n reset_jobserver ();\n }\n\n job_setup_complete:\n\n /* The extra indirection through $(MAKE_COMMAND) is done\n for hysterical raisins. */\n\n define_variable_cname (\"MAKE_COMMAND\", argv[0], o_default, 0);\n define_variable_cname (\"MAKE\", \"$(MAKE_COMMAND)\", o_default, 1);\n\n if (command_variables != 0)\n {\n struct command_variable *cv;\n struct variable *v;\n size_t len = 0;\n char *value, *p;\n\n /* Figure out how much space will be taken up by the command-line\n variable definitions. */\n for (cv = command_variables; cv != 0; cv = cv->next)\n {\n v = cv->variable;\n len += 2 * strlen (v->name);\n if (! v->recursive)\n ++len;\n ++len;\n len += 2 * strlen (v->value);\n ++len;\n }\n\n /* Now allocate a buffer big enough and fill it. */\n p = value = alloca (len);\n for (cv = command_variables; cv != 0; cv = cv->next)\n {\n v = cv->variable;\n p = quote_for_env (p, v->name);\n if (! v->recursive)\n *p++ = ':';\n *p++ = '=';\n p = quote_for_env (p, v->value);\n *p++ = ' ';\n }\n p[-1] = '\\0'; /* Kill the final space and terminate. */\n\n /* Define an unchangeable variable with a name that no POSIX.2\n makefile could validly use for its own variable. 
*/\n define_variable_cname (\"-*-command-variables-*-\", value, o_automatic, 0);\n\n /* Define the variable; this will not override any user definition.\n Normally a reference to this variable is written into the value of\n MAKEFLAGS, allowing the user to override this value to affect the\n exported value of MAKEFLAGS. In POSIX-pedantic mode, we cannot\n allow the user's setting of MAKEOVERRIDES to affect MAKEFLAGS, so\n a reference to this hidden variable is written instead. */\n define_variable_cname (\"MAKEOVERRIDES\", \"${-*-command-variables-*-}\",\n o_env, 1);\n }\n\n /* If there were -C flags, move ourselves about. */\n if (directories != 0)\n {\n unsigned int i;\n for (i = 0; directories->list[i] != 0; ++i)\n {\n const char *dir = directories->list[i];\n if (chdir (dir) < 0)\n pfatal_with_name (dir);\n }\n }\n\n /* Except under -s, always do -w in sub-makes and under -C. */\n if (!silent_flag && (directories != 0 || makelevel > 0))\n print_directory = 1;\n\n /* Let the user disable that with --no-print-directory. */\n if (inhibit_print_directory)\n print_directory = 0;\n\n /* If -R was given, set -r too (doesn't make sense otherwise!) */\n if (no_builtin_variables_flag)\n no_builtin_rules_flag = 1;\n\n /* Construct the list of include directories to search. */\n\n construct_include_path (include_directories == 0\n ? 0 : include_directories->list);\n\n /* If we chdir'ed, figure out where we are now. */\n if (directories)\n {\n#ifdef WINDOWS32\n if (getcwd_fs (current_directory, GET_PATH_MAX) == 0)\n#else\n if (getcwd (current_directory, GET_PATH_MAX) == 0)\n#endif\n {\n#ifdef HAVE_GETCWD\n perror_with_name (\"getcwd\", \"\");\n#else\n OS (error, NILF, \"getwd: %s\", current_directory);\n#endif\n starting_directory = 0;\n }\n else\n starting_directory = current_directory;\n }\n\n define_variable_cname (\"CURDIR\", current_directory, o_file, 0);\n\n /* Read any stdin makefiles into temporary files. 
*/\n\n if (makefiles != 0)\n {\n unsigned int i;\n for (i = 0; i < makefiles->idx; ++i)\n if (makefiles->list[i][0] == '-' && makefiles->list[i][1] == '\\0')\n {\n /* This makefile is standard input. Since we may re-exec\n and thus re-read the makefiles, we read standard input\n into a temporary file and read from that. */\n FILE *outfile;\n char *template;\n const char *tmpdir;\n\n if (stdin_nm)\n O (fatal, NILF,\n _(\"Makefile from standard input specified twice.\"));\n\n#ifdef P_tmpdir\n# define DEFAULT_TMPDIR P_tmpdir\n#else\n# define DEFAULT_TMPDIR \"/tmp\"\n#endif\n#define DEFAULT_TMPFILE \"GmXXXXXX\"\n\n if (((tmpdir = getenv (\"TMPDIR\")) == NULL || *tmpdir == '\\0')\n )\n tmpdir = DEFAULT_TMPDIR;\n\n template = alloca (strlen (tmpdir) + CSTRLEN (DEFAULT_TMPFILE) + 2);\n strcpy (template, tmpdir);\n\n#ifdef HAVE_DOS_PATHS\n if (strchr (\"/\\\\\", template[strlen (template) - 1]) == NULL)\n strcat (template, \"/\");\n#else\n# ifndef VMS\n if (template[strlen (template) - 1] != '/')\n strcat (template, \"/\");\n# endif /* !VMS */\n#endif /* !HAVE_DOS_PATHS */\n\n strcat (template, DEFAULT_TMPFILE);\n outfile = get_tmpfile (&stdin_nm, template);\n if (outfile == 0)\n pfatal_with_name (_(\"fopen (temporary file)\"));\n while (!feof (stdin) && ! ferror (stdin))\n {\n char buf[2048];\n size_t n = fread (buf, 1, sizeof (buf), stdin);\n if (n > 0 && fwrite (buf, 1, n, outfile) != n)\n pfatal_with_name (_(\"fwrite (temporary file)\"));\n }\n fclose (outfile);\n\n /* Replace the name that read_all_makefiles will\n see with the name of the temporary file. */\n makefiles->list[i] = strcache_add (stdin_nm);\n\n /* Make sure the temporary file will not be remade. */\n {\n struct file *f = enter_file (strcache_add (stdin_nm));\n f->updated = 1;\n f->update_status = us_success;\n f->command_state = cs_finished;\n /* Can't be intermediate, or it'll be removed too early for\n make re-exec. 
*/\n f->intermediate = 0;\n f->dontcare = 0;\n }\n }\n }\n\n#ifndef __EMX__ /* Don't use a SIGCHLD handler for OS/2 */\n#if !defined(HAVE_WAIT_NOHANG) || defined(MAKE_JOBSERVER)\n /* Set up to handle children dying. This must be done before\n reading in the makefiles so that 'shell' function calls will work.\n\n If we don't have a hanging wait we have to fall back to old, broken\n functionality here and rely on the signal handler and counting\n children.\n\n If we're using the jobs pipe we need a signal handler so that SIGCHLD is\n not ignored; we need it to interrupt the read(2) of the jobserver pipe if\n we're waiting for a token.\n\n If none of these are true, we don't need a signal handler at all. */\n {\n# if defined SIGCHLD\n bsd_signal (SIGCHLD, child_handler);\n# endif\n# if defined SIGCLD && SIGCLD != SIGCHLD\n bsd_signal (SIGCLD, child_handler);\n# endif\n }\n\n#ifdef HAVE_PSELECT\n /* If we have pselect() then we need to block SIGCHLD so it's deferred. */\n {\n sigset_t block;\n sigemptyset (&block);\n sigaddset (&block, SIGCHLD);\n if (sigprocmask (SIG_SETMASK, &block, NULL) < 0)\n pfatal_with_name (\"sigprocmask(SIG_SETMASK, SIGCHLD)\");\n }\n#endif\n\n#endif\n#endif\n\n /* Let the user send us SIGUSR1 to toggle the -d flag during the run. */\n#ifdef SIGUSR1\n bsd_signal (SIGUSR1, debug_signal_handler);\n#endif\n\n /* Define the initial list of suffixes for old-style rules. */\n set_default_suffixes ();\n\n /* Define the file rules for the built-in suffix rules. These will later\n be converted into pattern rules. We used to do this in\n install_default_implicit_rules, but since that happens after reading\n makefiles, it results in the built-in pattern rules taking precedence\n over makefile-specified suffix rules, which is wrong. */\n install_default_suffix_rules ();\n\n /* Define some internal and special variables. 
*/\n define_automatic_variables ();\n\n /* Set up the MAKEFLAGS and MFLAGS variables for makefiles to see.\n Initialize it to be exported but allow the makefile to reset it. */\n define_makeflags (0, 0)->export = v_export;\n\n /* Define the default variables. */\n define_default_variables ();\n\n default_file = enter_file (strcache_add (\".DEFAULT\"));\n\n default_goal_var = define_variable_cname (\".DEFAULT_GOAL\", \"\", o_file, 0);\n\n /* Evaluate all strings provided with --eval.\n Also set up the $(-*-eval-flags-*-) variable. */\n\n if (eval_strings)\n {\n char *p, *value;\n unsigned int i;\n size_t len = (CSTRLEN (\"--eval=\") + 1) * eval_strings->idx;\n\n for (i = 0; i < eval_strings->idx; ++i)\n {\n p = xstrdup (eval_strings->list[i]);\n len += 2 * strlen (p);\n eval_buffer (p, NULL);\n free (p);\n }\n\n p = value = alloca (len);\n for (i = 0; i < eval_strings->idx; ++i)\n {\n strcpy (p, \"--eval=\");\n p += CSTRLEN (\"--eval=\");\n p = quote_for_env (p, eval_strings->list[i]);\n *(p++) = ' ';\n }\n p[-1] = '\\0';\n\n define_variable_cname (\"-*-eval-flags-*-\", value, o_automatic, 0);\n }\n\n /* Read all the makefiles. */\n\n read_makefiles = read_all_makefiles (makefiles == 0 ? 0 : makefiles->list);\n\n#ifdef WINDOWS32\n /* look one last time after reading all Makefiles */\n if (no_default_sh_exe)\n no_default_sh_exe = !find_and_set_default_shell (NULL);\n#endif /* WINDOWS32 */\n\n#if defined (__MSDOS__) || defined (__EMX__) || defined (VMS)\n /* We need to know what kind of shell we will be using. 
*/\n {\n extern int _is_unixy_shell (const char *_path);\n struct variable *shv = lookup_variable (STRING_SIZE_TUPLE (\"SHELL\"));\n extern int unixy_shell;\n extern const char *default_shell;\n\n if (shv && *shv->value)\n {\n char *shell_path = recursively_expand (shv);\n\n if (shell_path && _is_unixy_shell (shell_path))\n unixy_shell = 1;\n else\n unixy_shell = 0;\n if (shell_path)\n default_shell = shell_path;\n }\n }\n#endif /* __MSDOS__ || __EMX__ */\n\n {\n int old_builtin_rules_flag = no_builtin_rules_flag;\n int old_builtin_variables_flag = no_builtin_variables_flag;\n int old_arg_job_slots = arg_job_slots;\n\n arg_job_slots = INVALID_JOB_SLOTS;\n\n /* Decode switches again, for variables set by the makefile. */\n decode_env_switches (STRING_SIZE_TUPLE (\"GNUMAKEFLAGS\"));\n\n /* Clear GNUMAKEFLAGS to avoid duplication. */\n define_variable_cname (\"GNUMAKEFLAGS\", \"\", o_override, 0);\n\n decode_env_switches (STRING_SIZE_TUPLE (\"MAKEFLAGS\"));\n#if 0\n decode_env_switches (STRING_SIZE_TUPLE (\"MFLAGS\"));\n#endif\n\n /* If -j is not set in the makefile, or it was set on the command line,\n reset to use the previous value. */\n if (arg_job_slots == INVALID_JOB_SLOTS || argv_slots != INVALID_JOB_SLOTS)\n arg_job_slots = old_arg_job_slots;\n\n else if (jobserver_auth)\n {\n /* Makefile MAKEFLAGS set -j, but we already have a jobserver.\n Make us the master of a new jobserver group. */\n if (!restarts)\n ON (error, NILF,\n _(\"warning: -j%d forced in makefile: resetting jobserver mode.\"),\n arg_job_slots);\n\n /* We can't use our parent's jobserver, so reset. */\n reset_jobserver ();\n }\n\n /* Reset in case the switches changed our mind. */\n syncing = (output_sync == OUTPUT_SYNC_LINE\n || output_sync == OUTPUT_SYNC_TARGET);\n\n if (make_sync.syncout && ! syncing)\n output_close (&make_sync);\n\n make_sync.syncout = syncing;\n OUTPUT_SET (&make_sync);\n\n /* If we've disabled builtin rules, get rid of them. */\n if (no_builtin_rules_flag && ! 
old_builtin_rules_flag)\n {\n if (suffix_file->builtin)\n {\n free_dep_chain (suffix_file->deps);\n suffix_file->deps = 0;\n }\n define_variable_cname (\"SUFFIXES\", \"\", o_default, 0);\n }\n\n /* If we've disabled builtin variables, get rid of them. */\n if (no_builtin_variables_flag && ! old_builtin_variables_flag)\n undefine_default_variables ();\n }\n\n /* Final jobserver configuration.\n\n If we have jobserver_auth then we are a client in an existing jobserver\n group, that's already been verified OK above. If we don't have\n jobserver_auth and jobserver is enabled, then start a new jobserver.\n\n arg_job_slots = INVALID_JOB_SLOTS if we don't want -j in MAKEFLAGS\n\n arg_job_slots = # of jobs of parallelism\n\n job_slots = 0 for no limits on jobs, or when limiting via jobserver.\n\n job_slots = 1 for standard non-parallel mode.\n\n job_slots >1 for old-style parallelism without jobservers. */\n\n if (jobserver_auth)\n job_slots = 0;\n else if (arg_job_slots == INVALID_JOB_SLOTS)\n job_slots = 1;\n else\n job_slots = arg_job_slots;\n\n /* If we have >1 slot at this point, then we're a top-level make.\n Set up the jobserver.\n\n Every make assumes that it always has one job it can run. For the\n submakes it's the token they were given by their parent. For the top\n make, we just subtract one from the number the user wants. */\n\n if (job_slots > 1 && jobserver_setup (job_slots - 1))\n {\n /* Fill in the jobserver_auth for our children. */\n jobserver_auth = jobserver_get_auth ();\n\n if (jobserver_auth)\n {\n /* We're using the jobserver so set job_slots to 0. */\n master_job_slots = job_slots;\n job_slots = 0;\n }\n }\n\n /* If we're not using parallel jobs, then we don't need output sync.\n This is so people can enable output sync in GNUMAKEFLAGS or similar, but\n not have it take effect unless parallel builds are enabled. 
*/\n if (syncing && job_slots == 1)\n {\n OUTPUT_UNSET ();\n output_close (&make_sync);\n syncing = 0;\n output_sync = OUTPUT_SYNC_NONE;\n }\n\n#ifndef MAKE_SYMLINKS\n if (check_symlink_flag)\n {\n O (error, NILF, _(\"Symbolic links not supported: disabling -L.\"));\n check_symlink_flag = 0;\n }\n#endif\n\n /* Set up MAKEFLAGS and MFLAGS again, so they will be right. */\n\n define_makeflags (1, 0);\n\n /* Make each 'struct goaldep' point at the 'struct file' for the file\n depended on. Also do magic for special targets. */\n\n snap_deps ();\n\n /* Convert old-style suffix rules to pattern rules. It is important to\n do this before installing the built-in pattern rules below, so that\n makefile-specified suffix rules take precedence over built-in pattern\n rules. */\n\n convert_to_pattern ();\n\n /* Install the default implicit pattern rules.\n This used to be done before reading the makefiles.\n But in that case, built-in pattern rules were in the chain\n before user-defined ones, so they matched first. */\n\n install_default_implicit_rules ();\n\n /* Compute implicit rule limits and do magic for pattern rules. */\n\n snap_implicit_rules ();\n\n /* Construct the listings of directories in VPATH lists. */\n\n build_vpath_lists ();\n\n /* Mark files given with -o flags as very old and as having been updated\n already, and files given with -W flags as brand new (time-stamp as far\n as possible into the future). If restarts is set we'll do -W later. */\n\n if (old_files != 0)\n {\n const char **p;\n for (p = old_files->list; *p != 0; ++p)\n {\n struct file *f = enter_file (*p);\n f->last_mtime = f->mtime_before_update = OLD_MTIME;\n f->updated = 1;\n f->update_status = us_success;\n f->command_state = cs_finished;\n }\n }\n\n if (!restarts && new_files != 0)\n {\n const char **p;\n for (p = new_files->list; *p != 0; ++p)\n {\n struct file *f = enter_file (*p);\n f->last_mtime = f->mtime_before_update = NEW_MTIME;\n }\n }\n\n /* Initialize the remote job module. 
*/\n remote_setup ();\n\n /* Dump any output we've collected. */\n\n OUTPUT_UNSET ();\n output_close (&make_sync);\n\n if (read_makefiles)\n {\n /* Update any makefiles if necessary. */\n\n FILE_TIMESTAMP *makefile_mtimes;\n char **aargv = NULL;\n const char **nargv;\n int nargc;\n enum update_status status;\n\n DB (DB_BASIC, (_(\"Updating makefiles...\\n\")));\n\n {\n struct goaldep *d;\n unsigned int num_mkfiles = 0;\n for (d = read_makefiles; d != NULL; d = d->next)\n ++num_mkfiles;\n\n makefile_mtimes = alloca (num_mkfiles * sizeof (FILE_TIMESTAMP));\n }\n\n /* Remove any makefiles we don't want to try to update. Record the\n current modtimes of the others so we can compare them later. */\n {\n struct goaldep *d = read_makefiles;\n struct goaldep *last = NULL;\n unsigned int mm_idx = 0;\n\n while (d != 0)\n {\n struct file *f;\n\n for (f = d->file->double_colon; f != NULL; f = f->prev)\n if (f->deps == 0 && f->cmds != 0)\n break;\n\n if (f)\n {\n /* This makefile is a :: target with commands, but no\n dependencies. So, it will always be remade. This might\n well cause an infinite loop, so don't try to remake it.\n (This will only happen if your makefiles are written\n exceptionally stupidly; but if you work for Athena, that's\n how you write your makefiles.) */\n\n DB (DB_VERBOSE,\n (_(\"Makefile '%s' might loop; not remaking it.\\n\"),\n f->name));\n\n if (last)\n last->next = d->next;\n else\n read_makefiles = d->next;\n\n /* Free the storage. */\n free_goaldep (d);\n\n d = last ? last->next : read_makefiles;\n }\n else\n {\n makefile_mtimes[mm_idx++] = file_mtime_no_search (d->file);\n last = d;\n d = d->next;\n }\n }\n }\n\n /* Set up 'MAKEFLAGS' specially while remaking makefiles. */\n define_makeflags (1, 1);\n\n {\n int orig_db_level = db_level;\n\n if (! 
ISDB (DB_MAKEFILES))\n db_level = DB_NONE;\n\n rebuilding_makefiles = 1;\n\tstatus = update_goal_chain (read_makefiles);\n rebuilding_makefiles = 0;\n\n db_level = orig_db_level;\n }\n\n switch (status)\n {\n case us_question:\n /* The only way this can happen is if the user specified -q and asked\n for one of the makefiles to be remade as a target on the command\n line. Since we're not actually updating anything with -q we can\n treat this as \"did nothing\". */\n\n case us_none:\n /* Did nothing. */\n break;\n\n case us_failed:\n /* Failed to update. Figure out if we care. */\n {\n /* Nonzero if any makefile was successfully remade. */\n int any_remade = 0;\n /* Nonzero if any makefile we care about failed\n in updating or could not be found at all. */\n int any_failed = 0;\n unsigned int i;\n struct goaldep *d;\n\n for (i = 0, d = read_makefiles; d != 0; ++i, d = d->next)\n {\n if (d->file->updated)\n {\n /* This makefile was updated. */\n if (d->file->update_status == us_success)\n {\n /* It was successfully updated. */\n any_remade |= (file_mtime_no_search (d->file)\n != makefile_mtimes[i]);\n }\n else if (! (d->flags & RM_DONTCARE))\n {\n FILE_TIMESTAMP mtime;\n /* The update failed and this makefile was not\n from the MAKEFILES variable, so we care. */\n OS (error, NILF, _(\"Failed to remake makefile '%s'.\"),\n d->file->name);\n mtime = file_mtime_no_search (d->file);\n any_remade |= (mtime != NONEXISTENT_MTIME\n && mtime != makefile_mtimes[i]);\n makefile_status = MAKE_FAILURE;\n }\n }\n else\n /* This makefile was not found at all. */\n if (! (d->flags & RM_DONTCARE))\n {\n const char *dnm = dep_name (d);\n size_t l = strlen (dnm);\n\n /* This is a makefile we care about. See how much. */\n if (d->flags & RM_INCLUDED)\n /* An included makefile. We don't need to die, but we\n do want to complain. */\n error (NILF, l,\n _(\"Included makefile '%s' was not found.\"), dnm);\n else\n {\n /* A normal makefile. We must die later. 
*/\n error (NILF, l,\n _(\"Makefile '%s' was not found\"), dnm);\n any_failed = 1;\n }\n }\n }\n\n if (any_remade)\n goto re_exec;\n if (any_failed)\n die (MAKE_FAILURE);\n break;\n }\n\n case us_success:\n re_exec:\n /* Updated successfully. Re-exec ourselves. */\n\n remove_intermediates (0);\n\n if (print_data_base_flag)\n print_data_base ();\n\n clean_jobserver (0);\n\n if (makefiles != 0)\n {\n /* These names might have changed. */\n int i, j = 0;\n for (i = 1; i < argc; ++i)\n if (strneq (argv[i], \"-f\", 2)) /* XXX */\n {\n if (argv[i][2] == '\\0')\n /* This cast is OK since we never modify argv. */\n argv[++i] = (char *) makefiles->list[j];\n else\n argv[i] = xstrdup (concat (2, \"-f\", makefiles->list[j]));\n ++j;\n }\n }\n\n /* Add -o option for the stdin temporary file, if necessary. */\n nargc = argc;\n if (stdin_nm)\n {\n void *m = xmalloc ((nargc + 2) * sizeof (char *));\n aargv = m;\n memcpy (aargv, argv, argc * sizeof (char *));\n aargv[nargc++] = xstrdup (concat (2, \"-o\", stdin_nm));\n aargv[nargc] = 0;\n nargv = m;\n }\n else\n nargv = (const char**)argv;\n\n if (directories != 0 && directories->idx > 0)\n {\n int bad = 1;\n if (directory_before_chdir != 0)\n {\n if (chdir (directory_before_chdir) < 0)\n perror_with_name (\"chdir\", \"\");\n else\n bad = 0;\n }\n if (bad)\n O (fatal, NILF,\n _(\"Couldn't change back to original directory.\"));\n }\n\n ++restarts;\n\n if (ISDB (DB_BASIC))\n {\n const char **p;\n printf (_(\"Re-executing[%u]:\"), restarts);\n for (p = nargv; *p != 0; ++p)\n printf (\" %s\", *p);\n putchar ('\\n');\n fflush (stdout);\n }\n\n {\n char **p;\n for (p = environ; *p != 0; ++p)\n {\n if (strneq (*p, MAKELEVEL_NAME \"=\", MAKELEVEL_LENGTH+1))\n {\n *p = alloca (40);\n sprintf (*p, \"%s=%u\", MAKELEVEL_NAME, makelevel);\n }\n else if (strneq (*p, \"MAKE_RESTARTS=\", CSTRLEN (\"MAKE_RESTARTS=\")))\n {\n *p = alloca (40);\n sprintf (*p, \"MAKE_RESTARTS=%s%u\",\n OUTPUT_IS_TRACED () ? 
\"-\" : \"\", restarts);\n restarts = 0;\n }\n }\n }\n\n /* If we didn't set the restarts variable yet, add it. */\n if (restarts)\n {\n char *b = alloca (40);\n sprintf (b, \"MAKE_RESTARTS=%s%u\",\n OUTPUT_IS_TRACED () ? \"-\" : \"\", restarts);\n putenv (b);\n }\n\n fflush (stdout);\n fflush (stderr);\n\n /* The exec'd \"child\" will be another make, of course. */\n jobserver_pre_child(1);\n\n#ifdef SET_STACK_SIZE\n /* Reset limits, if necessary. */\n if (stack_limit.rlim_cur)\n setrlimit (RLIMIT_STACK, &stack_limit);\n#endif\n exec_command ((char **)nargv, environ);\n\n /* We shouldn't get here but just in case. */\n jobserver_post_child(1);\n free (aargv);\n break;\n }\n }\n\n /* Set up 'MAKEFLAGS' again for the normal targets. */\n define_makeflags (1, 0);\n\n /* Set always_make_flag if -B was given. */\n always_make_flag = always_make_set;\n\n /* If restarts is set we haven't set up -W files yet, so do that now. */\n if (restarts && new_files != 0)\n {\n const char **p;\n for (p = new_files->list; *p != 0; ++p)\n {\n struct file *f = enter_file (*p);\n f->last_mtime = f->mtime_before_update = NEW_MTIME;\n }\n }\n\n /* If there is a temp file from reading a makefile from stdin, get rid of\n it now. */\n if (stdin_nm && unlink (stdin_nm) < 0 && errno != ENOENT)\n perror_with_name (_(\"unlink (temporary file): \"), stdin_nm);\n\n /* If there were no command-line goals, use the default. */\n if (goals == 0)\n {\n char *p;\n\n if (default_goal_var->recursive)\n p = variable_expand (default_goal_var->value);\n else\n {\n p = variable_buffer_output (variable_buffer, default_goal_var->value,\n strlen (default_goal_var->value));\n *p = '\\0';\n p = variable_buffer;\n }\n\n if (*p != '\\0')\n {\n struct file *f = lookup_file (p);\n\n /* If .DEFAULT_GOAL is a non-existent target, enter it into the\n table and let the standard logic sort it out. 
*/\n if (f == 0)\n {\n struct nameseq *ns;\n\n ns = PARSE_SIMPLE_SEQ (&p, struct nameseq);\n if (ns)\n {\n /* .DEFAULT_GOAL should contain one target. */\n if (ns->next != 0)\n O (fatal, NILF,\n _(\".DEFAULT_GOAL contains more than one target\"));\n\n f = enter_file (strcache_add (ns->name));\n\n ns->name = 0; /* It was reused by enter_file(). */\n free_ns_chain (ns);\n }\n }\n\n if (f)\n {\n goals = alloc_goaldep ();\n goals->file = f;\n }\n }\n }\n else\n lastgoal->next = 0;\n\n\n if (!goals)\n {\n struct variable *v = lookup_variable (STRING_SIZE_TUPLE (\"MAKEFILE_LIST\"));\n if (v && v->value && v->value[0] != '\\0')\n O (fatal, NILF, _(\"No targets\"));\n\n O (fatal, NILF, _(\"No targets specified and no makefile found\"));\n }\n if (show_task_comments_flag) {\n dbg_cmd_info_targets(show_task_comments_flag\n\t\t\t ? INFO_TARGET_TASKS_WITH_COMMENTS\n\t\t\t : INFO_TARGET_TASKS);\n die(0);\n }\n if (show_tasks_flag) {\n dbg_cmd_info_tasks();\n die(0);\n } else if (show_targets_flag) {\n dbg_cmd_info_targets(INFO_TARGET_NAME);\n die(0);\n }\n\n /* Update the goals. */\n\n DB (DB_BASIC, (_(\"Updating goal targets...\\n\")));\n\n {\n switch (update_goal_chain (goals))\n {\n case us_none:\n /* Nothing happened. */\n /* FALLTHROUGH */\n case us_success:\n /* Keep the previous result. */\n break;\n case us_question:\n /* We are under -q and would run some commands. */\n makefile_status = MAKE_TROUBLE;\n break;\n case us_failed:\n /* Updating failed. POSIX.2 specifies exit status >1 for this; */\n makefile_status = MAKE_FAILURE;\n break;\n }\n\n /* If we detected some clock skew, generate one last warning */\n if (clock_skew_detected)\n O (error, NILF,\n _(\"warning: Clock skew detected. Your build may be incomplete.\"));\n\n /* Exit. */\n die (makefile_status);\n }\n\n /* NOTREACHED */\n exit (MAKE_SUCCESS);\n}\n\f\n/* Parsing of arguments, decoding of switches. 
*/\n\nstatic char options[1 + sizeof (switches) / sizeof (switches[0]) * 3];\nstatic struct option long_options[(sizeof (switches) / sizeof (switches[0])) +\n (sizeof (long_option_aliases) /\n sizeof (long_option_aliases[0]))];\n\n/* Fill in the string and vector for getopt. */\nstatic void\ninit_switches (void)\n{\n char *p;\n unsigned int c;\n unsigned int i;\n\n if (options[0] != '\\0')\n /* Already done. */\n return;\n\n p = options;\n\n /* Return switch and non-switch args in order, regardless of\n POSIXLY_CORRECT. Non-switch args are returned as option 1. */\n *p++ = '-';\n\n for (i = 0; switches[i].c != '\\0'; ++i)\n {\n long_options[i].name = (char *) (switches[i].long_name == 0 ? \"\" :\n switches[i].long_name);\n long_options[i].flag = 0;\n long_options[i].val = switches[i].c;\n if (short_option (switches[i].c))\n *p++ = (char) switches[i].c;\n switch (switches[i].type)\n {\n case flag:\n case flag_off:\n case ignore:\n long_options[i].has_arg = no_argument;\n break;\n\n case string:\n case strlist:\n case filename:\n case positive_int:\n case floating:\n if (short_option (switches[i].c))\n *p++ = ':';\n if (switches[i].noarg_value != 0)\n {\n if (short_option (switches[i].c))\n *p++ = ':';\n long_options[i].has_arg = optional_argument;\n }\n else\n long_options[i].has_arg = required_argument;\n break;\n }\n }\n *p = '\\0';\n for (c = 0; c < (sizeof (long_option_aliases) /\n sizeof (long_option_aliases[0]));\n ++c)\n long_options[i++] = long_option_aliases[c];\n long_options[i].name = 0;\n}\n\n\n/* Non-option argument. It might be a variable definition. */\nstatic void\nhandle_non_switch_argument (const char *arg, int env)\n{\n struct variable *v;\n\n if (arg[0] == '-' && arg[1] == '\\0')\n /* Ignore plain '-' for compatibility. */\n return;\n\n v = try_variable_definition (0, arg, o_command, 0);\n if (v != 0)\n {\n /* It is indeed a variable definition. 
If we don't already have this\n one, record a pointer to the variable for later use in\n define_makeflags. */\n struct command_variable *cv;\n\n for (cv = command_variables; cv != 0; cv = cv->next)\n if (cv->variable == v)\n break;\n\n if (! cv)\n {\n cv = xmalloc (sizeof (*cv));\n cv->variable = v;\n cv->next = command_variables;\n command_variables = cv;\n }\n }\n else if (! env)\n {\n /* Not an option or variable definition; it must be a goal\n target! Enter it as a file and add it to the dep chain of\n goals. */\n struct file *f = enter_file (strcache_add (expand_command_line_file (arg)));\n f->cmd_target = 1;\n\n if (goals == 0)\n {\n goals = alloc_goaldep ();\n lastgoal = goals;\n }\n else\n {\n lastgoal->next = alloc_goaldep ();\n lastgoal = lastgoal->next;\n }\n\n lastgoal->file = f;\n\n {\n /* Add this target name to the MAKECMDGOALS variable. */\n struct variable *gv;\n const char *value;\n\n gv = lookup_variable (STRING_SIZE_TUPLE (\"MAKECMDGOALS\"));\n if (gv == 0)\n value = f->name;\n else\n {\n /* Paste the old and new values together */\n size_t oldlen, newlen;\n char *vp;\n\n oldlen = strlen (gv->value);\n newlen = strlen (f->name);\n vp = alloca (oldlen + 1 + newlen + 1);\n memcpy (vp, gv->value, oldlen);\n vp[oldlen] = ' ';\n memcpy (&vp[oldlen + 1], f->name, newlen + 1);\n value = vp;\n }\n define_variable_cname (\"MAKECMDGOALS\", value, o_default, 0);\n }\n }\n}\n\n/* Print a nice usage method. */\n\nstatic void\nprint_usage (int bad)\n{\n const char *const *cpp;\n FILE *usageto;\n\n if (print_version_flag)\n print_version ();\n\n usageto = bad ? 
stderr : stdout;\n\n fprintf (usageto, _(\"Usage: %s [options] [target] ...\\n\"), program);\n\n for (cpp = usage; *cpp; ++cpp)\n fputs (_(*cpp), usageto);\n\n if (!remote_description || *remote_description == '\\0')\n fprintf (usageto, _(\"\\nThis program built for %s\\n\"), make_host);\n else\n fprintf (usageto, _(\"\\nThis program built for %s (%s)\\n\"),\n make_host, remote_description);\n\n fprintf (usageto, _(\"Report bugs to https://github.com/rocky/remake/issues\\n\"));\n}\n\n/* Decode switches from ARGC and ARGV.\n They came from the environment if ENV is nonzero. */\n\nstatic void\ndecode_switches (int argc, const char **argv, int env)\n{\n int bad = 0;\n const struct command_switch *cs;\n struct stringlist *sl;\n int c;\n\n /* getopt does most of the parsing for us.\n First, get its vectors set up. */\n\n init_switches ();\n\n /* Let getopt produce error messages for the command line,\n but not for options from the environment. */\n opterr = !env;\n /* Reset getopt's state. */\n optind = 0;\n\n while (optind < argc)\n {\n const char *coptarg;\n\n /* Parse the next argument. */\n c = getopt_long (argc, (char*const*)argv, options, long_options, NULL);\n coptarg = optarg;\n if (c == EOF)\n /* End of arguments, or \"--\" marker seen. */\n break;\n else if (c == 1)\n /* An argument not starting with a dash. */\n handle_non_switch_argument (coptarg, env);\n else if (c == '?')\n /* Bad option. We will print a usage message and die later.\n But continue to parse the other options so the user can\n see all he did wrong. */\n bad = 1;\n else\n for (cs = switches; cs->c != '\\0'; ++cs)\n if (cs->c == c)\n {\n /* Whether or not we will actually do anything with\n this switch. We test this individually inside the\n switch below rather than just once outside it, so that\n options which are to be ignored still consume args. 
*/\n int doit = !env || cs->env;\n\n switch (cs->type)\n {\n default:\n abort ();\n\n case ignore:\n break;\n\n case flag:\n case flag_off:\n if (doit)\n *(int *) cs->value_ptr = cs->type == flag;\n break;\n\n case string:\n case strlist:\n case filename:\n if (!doit)\n break;\n\n if (! coptarg)\n coptarg = xstrdup (cs->noarg_value);\n else if (*coptarg == '\\0')\n {\n char opt[2] = \"c\";\n const char *op = opt;\n\n if (short_option (cs->c))\n opt[0] = (char) cs->c;\n else\n op = cs->long_name;\n\n error (NILF, strlen (op),\n _(\"the '%s%s' option requires a non-empty string argument\"),\n short_option (cs->c) ? \"-\" : \"--\", op);\n bad = 1;\n break;\n }\n\n if (cs->type == string)\n {\n char **val = (char **)cs->value_ptr;\n free (*val);\n *val = xstrdup (coptarg);\n break;\n }\n\n sl = *(struct stringlist **) cs->value_ptr;\n if (sl == 0)\n {\n sl = xmalloc (sizeof (struct stringlist));\n sl->max = 5;\n sl->idx = 0;\n sl->list = xmalloc (5 * sizeof (char *));\n *(struct stringlist **) cs->value_ptr = sl;\n }\n else if (sl->idx == sl->max - 1)\n {\n sl->max += 5;\n /* MSVC erroneously warns without a cast here. */\n sl->list = xrealloc ((void *)sl->list,\n sl->max * sizeof (char *));\n }\n if (cs->type == filename)\n sl->list[sl->idx++] = expand_command_line_file (coptarg);\n else\n sl->list[sl->idx++] = xstrdup (coptarg);\n sl->list[sl->idx] = 0;\n break;\n\n case positive_int:\n /* See if we have an option argument; if we do require that\n it's all digits, not something like \"10foo\". */\n if (coptarg == 0 && argc > optind)\n {\n const char *cp;\n for (cp=argv[optind]; ISDIGIT (cp[0]); ++cp)\n ;\n if (cp[0] == '\\0')\n coptarg = argv[optind++];\n }\n\n if (!doit)\n break;\n\n if (coptarg)\n {\n int i = atoi (coptarg);\n const char *cp;\n\n /* Yes, I realize we're repeating this in some cases. 
*/\n for (cp = coptarg; ISDIGIT (cp[0]); ++cp)\n ;\n\n if (i < 1 || cp[0] != '\\0')\n {\n error (NILF, 0,\n _(\"the '-%c' option requires a positive integer argument\"),\n cs->c);\n bad = 1;\n }\n else\n *(unsigned int *) cs->value_ptr = i;\n }\n else\n *(unsigned int *) cs->value_ptr\n = *(unsigned int *) cs->noarg_value;\n break;\n\n case floating:\n if (coptarg == 0 && optind < argc\n && (ISDIGIT (argv[optind][0]) || argv[optind][0] == '.'))\n coptarg = argv[optind++];\n\n if (doit)\n *(double *) cs->value_ptr\n = (coptarg != 0 ? atof (coptarg)\n : *(double *) cs->noarg_value);\n\n break;\n }\n\n /* We've found the switch. Stop looking. */\n break;\n }\n }\n\n /* There are no more options according to getting getopt, but there may\n be some arguments left. Since we have asked for non-option arguments\n to be returned in order, this only happens when there is a \"--\"\n argument to prevent later arguments from being options. */\n while (optind < argc)\n handle_non_switch_argument (argv[optind++], env);\n\n if (!env && (bad || print_usage_flag))\n {\n print_usage (bad);\n die (bad ? MAKE_FAILURE : MAKE_SUCCESS);\n }\n\n /* If there are any options that need to be decoded do it now. */\n decode_debug_flags ();\n decode_output_sync_flags ();\n\n /* Perform any special switch handling. */\n run_silent = silent_flag;\n\n}\n\n/* Decode switches from environment variable ENVAR (which is LEN chars long).\n We do this by chopping the value into a vector of words, prepending a\n dash to the first word if it lacks one, and passing the vector to\n decode_switches. */\n\nstatic void\ndecode_env_switches (const char *envar, size_t len)\n{\n char *varref = alloca (2 + len + 2);\n char *value, *p, *buf;\n int argc;\n const char **argv;\n\n /* Get the variable's value. 
*/\n varref[0] = '$';\n varref[1] = '(';\n memcpy (&varref[2], envar, len);\n varref[2 + len] = ')';\n varref[2 + len + 1] = '\\0';\n value = variable_expand (varref);\n\n /* Skip whitespace, and check for an empty value. */\n NEXT_TOKEN (value);\n len = strlen (value);\n if (len == 0)\n return;\n\n /* Allocate a vector that is definitely big enough. */\n argv = alloca ((1 + len + 1) * sizeof (char *));\n\n /* getopt will look at the arguments starting at ARGV[1].\n Prepend a spacer word. */\n argv[0] = 0;\n argc = 1;\n\n /* We need a buffer to copy the value into while we split it into words\n and unquote it. Set up in case we need to prepend a dash later. */\n buf = alloca (1 + len + 1);\n buf[0] = '-';\n p = buf+1;\n argv[argc] = p;\n while (*value != '\\0')\n {\n if (*value == '\\\\' && value[1] != '\\0')\n ++value; /* Skip the backslash. */\n else if (ISBLANK (*value))\n {\n /* End of the word. */\n *p++ = '\\0';\n argv[++argc] = p;\n do\n ++value;\n while (ISBLANK (*value));\n continue;\n }\n *p++ = *value++;\n }\n *p = '\\0';\n argv[++argc] = 0;\n assert (p < buf + len + 2);\n\n if (argv[1][0] != '-' && strchr (argv[1], '=') == 0)\n /* The first word doesn't start with a dash and isn't a variable\n definition, so add a dash. */\n argv[1] = buf;\n\n /* Parse those words. */\n decode_switches (argc, argv, 1);\n}\n\f\n/* Quote the string IN so that it will be interpreted as a single word with\n no magic by decode_env_switches; also double dollar signs to avoid\n variable expansion in make itself. Write the result into OUT, returning\n the address of the next character to be written.\n Allocating space for OUT twice the length of IN is always sufficient. 
*/\n\nstatic char *\nquote_for_env (char *out, const char *in)\n{\n while (*in != '\\0')\n {\n if (*in == '$')\n *out++ = '$';\n else if (ISBLANK (*in) || *in == '\\\\')\n *out++ = '\\\\';\n *out++ = *in++;\n }\n\n return out;\n}\n\n/* Define the MAKEFLAGS and MFLAGS variables to reflect the settings of the\n command switches. Include options with args if ALL is nonzero.\n Don't include options with the 'no_makefile' flag set if MAKEFILE. */\n\nstatic struct variable *\ndefine_makeflags (int all, int makefile)\n{\n const char ref[] = \"MAKEOVERRIDES\";\n const char posixref[] = \"-*-command-variables-*-\";\n const char evalref[] = \"$(-*-eval-flags-*-)\";\n const struct command_switch *cs;\n char *flagstring;\n char *p;\n\n /* We will construct a linked list of 'struct flag's describing\n all the flags which need to go in MAKEFLAGS. Then, once we\n know how many there are and their lengths, we can put them all\n together in a string. */\n\n struct flag\n {\n struct flag *next;\n const struct command_switch *cs;\n const char *arg;\n };\n struct flag *flags = 0;\n struct flag *last = 0;\n size_t flagslen = 0;\n#define ADD_FLAG(ARG, LEN) \\\n do { \\\n struct flag *new = alloca (sizeof (struct flag)); \\\n new->cs = cs; \\\n new->arg = (ARG); \\\n new->next = 0; \\\n if (! flags) \\\n flags = new; \\\n else \\\n last->next = new; \\\n last = new; \\\n if (new->arg == 0) \\\n /* Just a single flag letter: \" -x\" */ \\\n flagslen += 3; \\\n else \\\n /* \" -xfoo\", plus space to escape \"foo\". */ \\\n flagslen += 1 + 1 + 1 + (3 * (LEN)); \\\n if (!short_option (cs->c)) \\\n /* This switch has no single-letter version, so we use the long. 
*/ \\\n flagslen += 2 + strlen (cs->long_name); \\\n } while (0)\n\n for (cs = switches; cs->c != '\\0'; ++cs)\n if (cs->toenv && (!makefile || !cs->no_makefile))\n switch (cs->type)\n {\n case ignore:\n break;\n\n case flag:\n case flag_off:\n if ((!*(int *) cs->value_ptr) == (cs->type == flag_off)\n && (cs->default_value == 0\n || *(int *) cs->value_ptr != *(int *) cs->default_value))\n\t if (cs->c != 'X') ADD_FLAG (0, 0);\n break;\n\n case positive_int:\n if (all)\n {\n if ((cs->default_value != 0\n && (*(unsigned int *) cs->value_ptr\n == *(unsigned int *) cs->default_value)))\n break;\n else if (cs->noarg_value != 0\n && (*(unsigned int *) cs->value_ptr ==\n *(unsigned int *) cs->noarg_value))\n ADD_FLAG (\"\", 0); /* Optional value omitted; see below. */\n else\n {\n char *buf = alloca (30);\n sprintf (buf, \"%u\", *(unsigned int *) cs->value_ptr);\n ADD_FLAG (buf, strlen (buf));\n }\n }\n break;\n\n case floating:\n if (all)\n {\n if (cs->default_value != 0\n && (*(double *) cs->value_ptr\n == *(double *) cs->default_value))\n break;\n else if (cs->noarg_value != 0\n && (*(double *) cs->value_ptr\n == *(double *) cs->noarg_value))\n ADD_FLAG (\"\", 0); /* Optional value omitted; see below. */\n else\n {\n char *buf = alloca (100);\n sprintf (buf, \"%g\", *(double *) cs->value_ptr);\n ADD_FLAG (buf, strlen (buf));\n }\n }\n break;\n\n case string:\n if (all)\n {\n p = *((char **)cs->value_ptr);\n if (p)\n ADD_FLAG (p, strlen (p));\n }\n break;\n\n case filename:\n case strlist:\n if (all)\n {\n struct stringlist *sl = *(struct stringlist **) cs->value_ptr;\n if (sl != 0)\n {\n unsigned int i;\n for (i = 0; i < sl->idx; ++i)\n ADD_FLAG (sl->list[i], strlen (sl->list[i]));\n }\n }\n break;\n\n default:\n abort ();\n }\n\n#undef ADD_FLAG\n\n /* Four more for the possible \" -- \", plus variable references. 
*/\n flagslen += 4 + CSTRLEN (posixref) + 4 + CSTRLEN (evalref) + 4;\n\n /* Construct the value in FLAGSTRING.\n We allocate enough space for a preceding dash and trailing null. */\n flagstring = alloca (1 + flagslen + 1);\n memset (flagstring, '\\0', 1 + flagslen + 1);\n p = flagstring;\n\n /* Start with a dash, for MFLAGS. */\n *p++ = '-';\n\n /* Add simple options as a group. */\n while (flags != 0 && !flags->arg && short_option (flags->cs->c))\n {\n if (flags->cs->c != 'X') {\n *p++ = (char) flags->cs->c;\n flags = flags->next;\n }\n }\n\n /* Now add more complex flags: ones with options and/or long names. */\n while (flags)\n {\n *p++ = ' ';\n *p++ = '-';\n\n /* Add the flag letter or name to the string. */\n if (short_option (flags->cs->c)) {\n if (flags->cs->c != 'X') *p++ = (char) flags->cs->c;\n } else\n {\n /* Long options require a double-dash. */\n *p++ = '-';\n strcpy (p, flags->cs->long_name);\n p += strlen (p);\n }\n /* An omitted optional argument has an ARG of \"\". */\n if (flags->arg && flags->arg[0] != '\\0')\n {\n if (!short_option (flags->cs->c))\n /* Long options require '='. */\n *p++ = '=';\n p = quote_for_env (p, flags->arg);\n }\n flags = flags->next;\n }\n\n /* If no flags at all, get rid of the initial dash. */\n if (p == &flagstring[1])\n {\n flagstring[0] = '\\0';\n p = flagstring;\n }\n\n /* Define MFLAGS before appending variable definitions. Omit an initial\n empty dash. Since MFLAGS is not parsed for flags, there is no reason to\n override any makefile redefinition. */\n define_variable_cname (\"MFLAGS\",\n flagstring + (flagstring[0] == '-' && flagstring[1] == ' ' ? 2 : 0),\n o_env, 1);\n\n /* Write a reference to -*-eval-flags-*-, which contains all the --eval\n flag options. 
*/\n if (eval_strings)\n {\n *p++ = ' ';\n memcpy (p, evalref, CSTRLEN (evalref));\n p += CSTRLEN (evalref);\n }\n\n if (all)\n {\n /* If there are any overrides to add, write a reference to\n $(MAKEOVERRIDES), which contains command-line variable definitions.\n Separate the variables from the switches with a \"--\" arg. */\n\n const char *r = posix_pedantic ? posixref : ref;\n size_t l = strlen (r);\n struct variable *v = lookup_variable (r, l);\n\n if (v && v->value && v->value[0] != '\\0')\n {\n strcpy (p, \" -- \");\n p += 4;\n\n *(p++) = '$';\n *(p++) = '(';\n memcpy (p, r, l);\n p += l;\n *(p++) = ')';\n }\n }\n\n /* If there is a leading dash, omit it. */\n if (flagstring[0] == '-')\n ++flagstring;\n\n /* This used to use o_env, but that lost when a makefile defined MAKEFLAGS.\n Makefiles set MAKEFLAGS to add switches, but we still want to redefine\n its value with the full set of switches. Then we used o_file, but that\n lost when users added -e, causing a previous MAKEFLAGS env. var. to take\n precedence over the new one. Of course, an override or command\n definition will still take precedence. */\n return define_variable_cname (\"MAKEFLAGS\", flagstring,\n env_overrides ? o_env_override : o_file, 1);\n}\n\f\n/* Print version information. */\n\nstatic void\nprint_version (void)\n{\n static int printed_version = 0;\n\n const char *precede = print_data_base_flag ? \"# \" : \"\";\n\n if (printed_version)\n /* Do it only once. */\n return;\n\n printf (\"%sGNU Make %s\\n\", precede, version_string);\n\n if (!remote_description || *remote_description == '\\0')\n printf (_(\"%sBuilt for %s\\n\"), precede, make_host);\n else\n printf (_(\"%sBuilt for %s (%s)\\n\"),\n precede, make_host, remote_description);\n\n /* Print this untranslated. 
The coding standards recommend translating the\n (C) to the copyright symbol, but this string is going to change every\n year, and none of the rest of it should be translated (including the\n word \"Copyright\"), so it hardly seems worth it. */\n\n printf (\"%sCopyright (C) 1988-2020 Free Software Foundation, Inc.\\n\"\n\t \"Copyright (C) 2015, 2017 Rocky Bernstein.\\n\",\n precede);\n\n printf (_(\"%sLicense GPLv3+: GNU GPL version 3 or later \\n\\\n%sThis is free software: you are free to change and redistribute it.\\n\\\n%sThere is NO WARRANTY, to the extent permitted by law.\\n\"),\n precede, precede, precede);\n\n printed_version = 1;\n\n /* Flush stdout so the user doesn't have to wait to see the\n version information while make thinks about things. */\n fflush (stdout);\n}\n\n/* Print a bunch of information about this and that. */\n\nstatic void\nprint_data_base (void)\n{\n time_t when = time ((time_t *) 0);\n\n print_version ();\n\n printf (_(\"\\n# Make data base, printed on %s\"), ctime (&when));\n\n print_variable_data_base ();\n print_dir_data_base ();\n print_rule_data_base (true);\n print_file_data_base ();\n print_vpath_data_base ();\n strcache_print_stats (\"#\");\n\n when = time ((time_t *) 0);\n printf (_(\"\\n# Finished Make data base on %s\\n\"), ctime (&when));\n}\n\nstatic void\nclean_jobserver (int status)\n{\n /* Sanity: have we written all our jobserver tokens back? If our\n exit status is 2 that means some kind of syntax error; we might not\n have written all our tokens so do that now. If tokens are left\n after any other error code, that's bad. */\n\n if (jobserver_enabled() && jobserver_tokens)\n {\n if (status != 2)\n ON (error, NILF,\n \"INTERNAL: Exiting with %u jobserver tokens (should be 0)!\",\n jobserver_tokens);\n else\n /* Don't write back the \"free\" token */\n while (--jobserver_tokens)\n jobserver_release (0);\n }\n\n\n /* Sanity: If we're the master, were all the tokens written back? 
*/\n\n if (master_job_slots)\n {\n /* We didn't write one for ourself, so start at 1. */\n unsigned int tokens = 1 + jobserver_acquire_all ();\n\n if (tokens != master_job_slots)\n ONN (error, NILF,\n \"INTERNAL: Exiting with %u jobserver tokens available; should be %u!\",\n tokens, master_job_slots);\n\n reset_jobserver ();\n }\n}\n\f\n/* Exit with STATUS, cleaning up as necessary. */\n\nvoid\ndie (int status)\n{\n static char dying = 0;\n\n if (!dying)\n {\n int err;\n\n dying = 1;\n\n if (print_version_flag)\n print_version ();\n\n /* Wait for children to die. */\n err = (status != 0);\n while (job_slots_used > 0)\n reap_children (1, err, NULL);\n\n /* Let the remote job module clean up its state. */\n remote_cleanup ();\n\n /* Remove the intermediate files. */\n remove_intermediates (0);\n\n if (print_data_base_flag)\n print_data_base ();\n\n if (verify_flag)\n verify_file_data_base ();\n\n clean_jobserver (status);\n\n if (output_context)\n {\n /* die() might be called in a recipe output context due to an\n $(error ...) function. */\n output_close (output_context);\n\n if (output_context != &make_sync)\n output_close (&make_sync);\n\n OUTPUT_UNSET ();\n }\n\n output_close (NULL);\n\n /* Try to move back to the original directory. This is essential on\n MS-DOS (where there is really only one process), and on Unix it\n puts core files in the original directory instead of the -C\n directory. Must wait until after remove_intermediates(), or unlinks\n of relative pathnames fail. */\n if (directory_before_chdir != 0)\n {\n /* If it fails we don't care: shut up GCC. 
*/\n int _x UNUSED;\n _x = chdir (directory_before_chdir);\n }\n }\n\n if (profile_flag) {\n const char *status_str;\n switch (status) {\n case MAKE_SUCCESS:\n\tstatus_str = \"Normal program termination\";\n\tbreak;\n case MAKE_TROUBLE:\n\tstatus_str = \"Platform failure termination\";\n\tbreak;\n case MAKE_FAILURE:\n\tstatus_str = \"Failure program termination\";\n\tbreak;\n case DEBUGGER_QUIT_RC:\n\tstatus_str = \"Debugger termination\";\n\tbreak;\n default:\n\tstatus_str = \"\";\n }\n\n profile_close(status_str, goals, (jobserver_auth != NULL));\n }\n exit (status);\n}\n"}, "files_after": {"src/main.c": "/* Argument parsing and main program of GNU Make.\nCopyright (C) 1988-2020 Free Software Foundation, Inc.\nCopyright (C) 2015, 2017 Rocky Bernstein\nThis file is part of GNU Make.\n\nGNU Make is free software; you can redistribute it and/or modify it under the\nterms of the GNU General Public License as published by the Free Software\nFoundation; either version 3 of the License, or (at your option) any later\nversion.\n\nGNU Make is distributed in the hope that it will be useful, but WITHOUT ANY\nWARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR\nA PARTICULAR PURPOSE. See the GNU General Public License for more details.\n\nYou should have received a copy of the GNU General Public License along with\nthis program. If not, see . 
*/\n\n#include \"makeint.h\"\n#include \"globals.h\"\n#include \"profile.h\"\n#include \"os.h\"\n#include \"filedef.h\"\n#include \"dep.h\"\n#include \"variable.h\"\n#include \"job.h\"\n#include \"commands.h\"\n#include \"rule.h\"\n#include \"debug.h\"\n#include \"getopt.h\"\n// debugger include(s)\n#include \"cmd.h\"\n\n#include \n#ifdef WINDOWS32\n# include \n# include \n#ifdef HAVE_STRINGS_H\n# include \t/* for strcasecmp */\n#endif\n# include \"pathstuff.h\"\n# include \"sub_proc.h\"\n# include \"w32err.h\"\n#endif\n#ifdef HAVE_FCNTL_H\n# include \n#endif\n\nstruct goaldep *read_makefiles;\n\nextern void initialize_stopchar_map ();\n\n#if defined HAVE_WAITPID || defined HAVE_WAIT3\n# define HAVE_WAIT_NOHANG\n#endif\n\n#ifndef HAVE_UNISTD_H\nint chdir ();\n#endif\n#ifndef STDC_HEADERS\n# ifndef sun /* Sun has an incorrect decl in a header. */\nvoid exit (int) NORETURN;\n# endif\ndouble atof ();\n#endif\n\nstatic void clean_jobserver (int status);\nstatic void print_data_base (void);\nvoid print_rule_data_base (bool b_verbose);\nstatic void print_version (void);\nstatic void decode_switches (int argc, const char **argv, int env);\nstatic void decode_env_switches (const char *envar, size_t len);\nstatic struct variable *define_makeflags (int all, int makefile);\nstatic char *quote_for_env (char *out, const char *in);\nstatic void initialize_global_hash_tables (void);\n\n\f\n/* The structure that describes an accepted command switch. */\n\nstruct command_switch\n {\n int c; /* The switch character. */\n\n enum /* Type of the value. */\n {\n flag, /* Turn int flag on. */\n flag_off, /* Turn int flag off. */\n string, /* One string per invocation. */\n strlist, /* One string per switch. */\n filename, /* A string containing a file name. */\n positive_int, /* A positive integer. */\n floating, /* A floating-point number (double). */\n ignore /* Ignored. */\n } type;\n\n void *value_ptr; /* Pointer to the value-holding variable. 
*/

    unsigned int env:1;         /* Can come from MAKEFLAGS.  */
    unsigned int toenv:1;       /* Should be put in MAKEFLAGS.  */
    unsigned int no_makefile:1; /* Don't propagate when remaking makefiles.  */

    const void *noarg_value;    /* Pointer to value used if no arg given.  */
    const void *default_value;  /* Pointer to default value.  */

    const char *long_name;      /* Long option name.  */
  };

/* True if C is a switch value that corresponds to a short option.  */

#define short_option(c) ((c) <= CHAR_MAX)

/* The structure used to hold the list of strings given
   in command switches of a type that takes strlist arguments.
   NOTE(review): the struct itself is not defined here; this comment
   appears to refer to 'struct stringlist' declared elsewhere.  */

/* The recognized command switches.  */

static const int default_silent_flag = 0;

/* Nonzero means either -s was given, or .SILENT-with-no-deps was seen.  */

int run_silent = 0;

/*! If non-null, contains the type of tracing we are to do.
    This is coordinated with tracing_flag. */
stringlist_t *tracing_opts = NULL;

/*! If true, show version information on entry. */
bool b_show_version = false;

/*! If true, go into debugger on error.
    Sets --debugger --debugger-stop=error. */
int post_mortem_flag = 0;

/*! Nonzero means use GNU readline in the debugger.
    Defaults on only when readline support was compiled in.  */
int use_readline_flag =
#ifdef HAVE_LIBREADLINE
  1
#else
  0
#endif
  ;

/*! If nonzero, only the basename of filenames is given in locations.
    Normally, giving a file directory location helps a debugger frontend
    when we change directories.  For regression tests it is helpful to
    list just the basename part as that doesn't change from installation
    to installation.  Users may have their preferences too.
*/
int basename_filenames = 0;

/* Synchronize output (--output-sync).  */

char *output_sync_option = 0;

/* Specify profile output formatting (--profile) */

char *profile_option = 0;

/* Specify the output directory for profiling information */

static struct stringlist *profile_dir_opt = 0;

/* Output level (--verbosity).
*/\n\nstatic const char *const usage[] =\n {\n N_(\"Options:\\n\"),\n N_(\"\\\n -b, -m Ignored for compatibility.\\n\"),\n N_(\"\\\n -B, --always-make Unconditionally make all targets.\\n\"),\n N_(\"\\\n -c, --search-parent Search parent directories for Makefile.\\n\"),\n N_(\"\\\n -C DIRECTORY, --directory=DIRECTORY\\n\\\n Change to DIRECTORY before doing anything.\\n\"),\n N_(\"\\\n -d Print lots of debugging information.\\n\"),\n N_(\"\\\n --debug[=FLAGS] Print various types of debugging information.\\n\"),\n N_(\"\\\n -e, --environment-overrides\\n\\\n Environment variables override makefiles.\\n\"),\n N_(\"\\\n -E STRING, --eval=STRING Evaluate STRING as a makefile statement.\\n\"),\n N_(\"\\\n -f FILE, --file=FILE, --makefile=FILE\\n\\\n Read FILE as a makefile.\\n\"),\n N_(\"\\\n -h, --help Print this message and exit.\\n\"),\n N_(\"\\\n -i, --ignore-errors Ignore errors from recipes.\\n\"),\n N_(\"\\\n -I DIRECTORY, --include-dir=DIRECTORY\\n\\\n Search DIRECTORY for included makefiles.\\n\"),\n N_(\"\\\n -j [N], --jobs[=N] Allow N jobs at once; infinite jobs with no arg.\\n\"),\n N_(\"\\\n -k, --keep-going Keep going when some targets can't be made.\\n\"),\n N_(\"\\\n -l [N], --load-average[=N], --max-load[=N]\\n\\\n Don't start multiple jobs unless load is below N.\\n\"),\n N_(\"\\\n -L, --check-symlink-times Use the latest mtime between symlinks and target.\\n\"),\n N_(\"\\\n --no-extended-errors Do not give additional error reporting.\\n\"),\n N_(\"\\\n -n, --just-print, --dry-run, --recon\\n\\\n Don't actually run any recipe; just print them.\\n\"),\n N_(\"\\\n -o FILE, --old-file=FILE, --assume-old=FILE\\n\\\n Consider FILE to be very old and don't remake it.\\n\"),\n N_(\"\\\n -O[TYPE], --output-sync[=TYPE]\\n\\\n Synchronize output of parallel jobs by TYPE.\\n\"),\n N_(\"\\\n -p, --print-data-base Print make's internal database.\\n\"),\n N_(\"\\\n -P, --profile[=FORMAT] Print profiling information for each target using FORMAT.\\n\\\n If FORMAT isn't 
specified, default to \\\"callgrind\\\"\\n\"),\n N_(\"\\\n --profile-directory=DIR Output profiling data to the DIR directory.\\n\"),\n N_(\"\\\n -q, --question Run no recipe; exit status says if up to date.\\n\"),\n N_(\"\\\n -r, --no-builtin-rules Disable the built-in implicit rules.\\n\"),\n N_(\"\\\n -R, --no-builtin-variables Disable the built-in variable settings.\\n\"),\n N_(\"\\\n -s, --silent, --quiet Don't echo recipes.\\n\"),\n N_(\"\\\n --no-silent Echo recipes (disable --silent mode).\\n\"),\n N_(\"\\\n -S, --no-keep-going, --stop\\n\\\n Turns off -k.\\n\"),\n N_(\"\\\n --targets Give list of explicitly-named targets.\\n\"),\n N_(\"\\\n --tasks Give list of targets which have descriptions\\n\\\n associated with them.\\n\"),\n N_(\"\\\n -t, --touch Touch targets instead of remaking them.\\n\"),\n N_(\"\\\n -v, --version Print the version number of make and exit.\\n\"),\n N_(\"\\\n --verbosity=LEVEL Set verbosity level. LEVEL may be \\\"terse\\\" \\\"no-header\\\" or\\n\\\n \\\"full\\\". The default is \\\"full\\\".\\n\"),\n N_(\"\\\n -w, --print-directory Print the current directory.\\n\"),\n N_(\"\\\n --no-print-directory Turn off -w, even if it was turned on implicitly.\\n\"),\n N_(\"\\\n -W FILE, --what-if=FILE, --new-file=FILE, --assume-new=FILE\\n\\\n Consider FILE to be infinitely new.\\n\"),\n N_(\"\\\n --warn-undefined-variables Warn when an undefined variable is referenced.\\n\"),\n N_(\"\\\n -x, --trace[=TYPE] Trace command execution TYPE may be\\n\\\n \\\"command\\\", \\\"read\\\", \\\"normal\\\".\\\"\\n\\\n \\\"noshell\\\", or \\\"full\\\". Default is \\\"normal\\\"\\n\"),\n N_(\"\\\n --debugger-stop[=TYPE] Which point to enter debugger. 
TYPE may be\\n\\\n \\\"goal\\\", \\\"preread\\\", \\\"preaction\\\",\\n\\\n \\\"full\\\", \\\"error\\\", or \\\"fatal\\\".\\n\\\n Only makes sense with -X set.\\n\"),\n N_(\"\\\n -v, --version Print the version number of make and exit.\\n\"),\n N_(\"\\\n -X, --debugger Enter debugger.\\n\"),\n N_(\"\\\n -!, --post-mortem Go into debugger on error.\\n\\\n Same as --debugger --debugger-stop=error\\n\"),\n N_(\"\\\n --no-readline Do not use GNU ReadLine in debugger.\\n\"),\n NULL\n };\n\n/* The table of command switches.\n Order matters here: this is the order MAKEFLAGS will be constructed.\n So be sure all simple flags (single char, no argument) come first. */\n\nstatic const struct command_switch switches[] =\n {\n { 'b', ignore, 0, 0, 0, 0, 0, 0, 0 },\n { 'B', flag, &always_make_set, 1, 1, 0, 0, 0, \"always-make\" },\n { 'c', flag, &search_parent_flag, 1, 1, 0, 0, 0, \"search-parent\" },\n { 'd', flag, &debug_flag, 1, 1, 0, 0, 0, 0 },\n { 'e', flag, &env_overrides, 1, 1, 0, 0, 0, \"environment-overrides\", },\n { 'h', flag, &print_usage_flag, 0, 0, 0, 0, 0, \"help\" },\n { 'i', flag, &ignore_errors_flag, 1, 1, 0, 0, 0, \"ignore-errors\" },\n { 'k', flag, &keep_going_flag, 1, 1, 0, 0, &default_keep_going_flag,\n \"keep-going\" },\n { 'L', flag, &check_symlink_flag, 1, 1, 0, 0, 0, \"check-symlink-times\" },\n { 'm', ignore, 0, 0, 0, 0, 0, 0, 0 },\n { 'n', flag, &just_print_flag, 1, 1, 1, 0, 0, \"just-print\" },\n { 'p', flag, &print_data_base_flag, 1, 1, 0, 0, 0, \"print-data-base\" },\n { 'P', string, &profile_option, 1, 1, 0, \"callgrind\", 0, \"profile\" },\n { 'q', flag, &question_flag, 1, 1, 1, 0, 0, \"question\" },\n { 'r', flag, &no_builtin_rules_flag, 1, 1, 0, 0, 0, \"no-builtin-rules\" },\n { 'R', flag, &no_builtin_variables_flag, 1, 1, 0, 0, 0,\n \"no-builtin-variables\" },\n { 's', flag, &silent_flag, 1, 1, 0, 0, &default_silent_flag, \"silent\" },\n { 'S', flag_off, &keep_going_flag, 1, 1, 0, 0, &default_keep_going_flag,\n \"no-keep-going\" },\n { 't', 
flag, &touch_flag, 1, 1, 1, 0, 0, \"touch\" },\n { 'v', flag, &print_version_flag, 1, 1, 0, 0, 0, \"version\" },\n { 'w', flag, &print_directory, 1, 1, 0, 0, 0, \"print-directory\" },\n { 'X', flag, &debugger_flag, 1, 1, 0, 0, 0, \"debugger\" },\n { '!', flag, &post_mortem_flag, 1, 1, 0, 0, 0, \"post-mortem\" },\n\n /* These options take arguments. */\n { 'C', filename, &directories, 0, 0, 0, 0, 0, \"directory\" },\n { 'E', strlist, &eval_strings, 1, 0, 0, 0, 0, \"eval\" },\n { 'f', filename, &makefiles, 0, 0, 0, 0, 0, \"file\" },\n { 'I', filename, &include_directories, 1, 1, 0, 0, 0,\n \"include-dir\" },\n { 'j', positive_int, &arg_job_slots, 1, 1, 0, &inf_jobs, &default_job_slots,\n \"jobs\" },\n { 'l', floating, &max_load_average, 1, 1, 0, &default_load_average,\n &default_load_average, \"load-average\" },\n { 'o', filename, &old_files, 0, 0, 0, 0, 0, \"old-file\" },\n { 'O', string, &output_sync_option, 1, 1, 0, \"target\", 0, \"output-sync\" },\n { 'W', filename, &new_files, 0, 0, 0, 0, 0, \"what-if\" },\n { 'x', strlist, &tracing_opts, 1, 1, 0, \"normal\", 0, \"trace\" },\n\n /* These are long-style options. 
*/\n { CHAR_MAX+1, strlist, &db_flags, 1, 1, 0, \"basic\", 0, \"debug\" },\n { CHAR_MAX+2, string, &jobserver_auth, 1, 1, 0, 0, 0, \"jobserver-auth\" },\n { CHAR_MAX+3, flag, &show_tasks_flag, 0, 0, 0, 0, 0, \"tasks\" },\n { CHAR_MAX+4, flag, &inhibit_print_directory, 1, 1, 0, 0, 0,\n \"no-print-directory\" },\n { CHAR_MAX+5, flag, &warn_undefined_variables_flag, 1, 1, 0, 0, 0,\n \"warn-undefined-variables\" },\n { CHAR_MAX+7, string, &sync_mutex, 1, 1, 0, 0, 0, \"sync-mutex\" },\n { CHAR_MAX+8, flag_off, &silent_flag, 1, 1, 0, 0, &default_silent_flag, \"no-silent\" },\n { CHAR_MAX+9, string, &jobserver_auth, 1, 0, 0, 0, 0, \"jobserver-fds\" },\n { CHAR_MAX+10, strlist, &verbosity_opts, 1, 1, 0, 0, 0,\n \"verbosity\" },\n { CHAR_MAX+11, flag, (char *) &no_extended_errors, 1, 1, 0, 0, 0,\n \"no-extended-errors\", },\n { CHAR_MAX+12, flag_off, (char *) &use_readline_flag, 1, 0, 0, 0, 0,\n \"no-readline\", },\n { CHAR_MAX+13, flag, &show_targets_flag, 0, 0, 0, 0, 0,\n \"targets\" },\n { CHAR_MAX+14, strlist, &debugger_opts, 1, 1, 0, \"preaction\", 0,\n \"debugger-stop\" },\n { CHAR_MAX+15, filename, &profile_dir_opt, 1, 1, 0, 0, 0, \"profile-directory\" },\n { 0, 0, 0, 0, 0, 0, 0, 0, 0 }\n };\n\n/* Secondary long names for options. */\n\nstatic struct option long_option_aliases[] =\n {\n { \"quiet\", no_argument, 0, 's' },\n { \"stop\", no_argument, 0, 'S' },\n { \"new-file\", required_argument, 0, 'W' },\n { \"assume-new\", required_argument, 0, 'W' },\n { \"assume-old\", required_argument, 0, 'o' },\n { \"max-load\", optional_argument, 0, 'l' },\n { \"dry-run\", no_argument, 0, 'n' },\n { \"recon\", no_argument, 0, 'n' },\n { \"makefile\", required_argument, 0, 'f' },\n };\n\n/* List of goal targets. */\n\nstatic struct goaldep *goals, *lastgoal;\n\n/* List of variables which were defined on the command line\n (or, equivalently, in MAKEFLAGS). 
*/

struct command_variable
  {
    struct command_variable *next;      /* Singly-linked list link.  */
    struct variable *variable;          /* The variable that was defined.  */
  };
static struct command_variable *command_variables;

/*! Value of argv[0] which seems to get modified. Can we merge this with
    program below? */
char *argv0 = NULL;

/*! The name we were invoked with.  */

/*! Our initial arguments -- used for debugger restart execvp.  */
const char * const*global_argv;

/*! Our current directory before processing any -C options.  */
char *directory_before_chdir = NULL;

/*! Pointer to the value of the .DEFAULT_GOAL special variable.
    The value will be the name of the goal to remake if the command line
    does not override it.  It can be set by the makefile, or else it's
    the first target defined in the makefile whose name does not start
    with '.'. */
struct variable * default_goal_var;

/*! Pointer to structure for the file .DEFAULT
    whose commands are used for any file that has none of its own.
    This is zero if the makefiles do not define .DEFAULT.  */
struct file *default_file;

/* Nonzero if we have seen the '.SECONDEXPANSION' target.
   This turns on secondary expansion of prerequisites.  */

int second_expansion;

/* Nonzero if we have seen the '.ONESHELL' target.
   This causes the entire recipe to be handed to SHELL
   as a single string, potentially containing newlines.  */

int one_shell;

/* Nonzero if we have seen the '.NOTPARALLEL' target.
   This turns off parallel builds for this invocation of make.  */

int not_parallel;

/* Nonzero if some rule detected clock skew; we keep track so (a) we only
   print one warning about it during the run, and (b) we can print a final
   warning at the end of the run.  */

int clock_skew_detected;

/* If output-sync is enabled we'll collect all the output generated due to
   options, while reading makefiles, etc.  */

struct output make_sync;


/* Mask of signals that are being caught with fatal_error_signal.
*/\n\n#if defined(POSIX)\nsigset_t fatal_signal_set;\n#elif defined(HAVE_SIGSETMASK)\nint fatal_signal_mask;\n#endif\n\n#if !HAVE_DECL_BSD_SIGNAL && !defined bsd_signal\n# if !defined HAVE_SIGACTION\n# define bsd_signal signal\n# else\ntypedef RETSIGTYPE (*bsd_signal_ret_t) (int);\n\nstatic bsd_signal_ret_t\nbsd_signal (int sig, bsd_signal_ret_t func)\n{\n struct sigaction act, oact;\n act.sa_handler = func;\n act.sa_flags = SA_RESTART;\n sigemptyset (&act.sa_mask);\n sigaddset (&act.sa_mask, sig);\n if (sigaction (sig, &act, &oact) != 0)\n return SIG_ERR;\n return oact.sa_handler;\n}\n# endif\n#endif\n\nvoid\ndecode_trace_flags (stringlist_t *ppsz_tracing_opts)\n{\n if (ppsz_tracing_opts) {\n const char **p;\n db_level |= (DB_TRACE | DB_SHELL);\n if (!ppsz_tracing_opts->list)\n db_level |= (DB_BASIC);\n else\n for (p = ppsz_tracing_opts->list; *p != 0; ++p) {\n if (0 == strcmp(*p, \"command\"))\n ;\n else if (0 == strcmp(*p, \"full\"))\n db_level |= (DB_VERBOSE|DB_READ_MAKEFILES);\n else if (0 == strcmp(*p, \"normal\"))\n db_level |= DB_BASIC;\n else if (0 == strcmp(*p, \"noshell\"))\n db_level = DB_BASIC | DB_TRACE;\n else if (0 == strcmp(*p, \"read\"))\n db_level |= DB_READ_MAKEFILES;\n else\n OS ( fatal, NILF, _(\"unknown trace command execution type `%s'\"), *p);\n }\n }\n}\n\nvoid\ndecode_verbosity_flags (stringlist_t *ppsz_verbosity_opts)\n{\n if (ppsz_verbosity_opts) {\n const char **p;\n if (ppsz_verbosity_opts->list)\n for (p = ppsz_verbosity_opts->list; *p != 0; ++p) {\n if (0 == strcmp(*p, \"no-header\"))\n b_show_version = false;\n else if (0 == strcmp(*p, \"full\")) {\n db_level |= (DB_VERBOSE);\n\t b_show_version = true;\n\t} else if (0 == strcmp(*p, \"terse\")) {\n\t db_level &= (~DB_VERBOSE);\n\t b_show_version = false;\n\t}\n }\n }\n}\n\nstatic void\ninitialize_global_hash_tables (void)\n{\n init_hash_global_variable_set ();\n strcache_init ();\n init_hash_files ();\n hash_init_directories ();\n hash_init_function_table ();\n}\n\n/* This character 
   map locate stop chars when parsing GNU makefiles.
   Each element is true if we should stop parsing on that character.
   NOTE(review): this comment looks orphaned — the map itself is not
   defined here; confirm it refers to a table defined elsewhere.  */

/* Canonicalize a file name given on the command line: expand a leading
   '~', strip redundant leading "./" components, and intern the result
   in the string cache.  Returns a cached (do-not-free) string.  */
static const char *
expand_command_line_file (const char *name)
{
  const char *cp;
  char *expanded = 0;

  if (name[0] == '\0')
    O (fatal, NILF, _("empty string invalid as file name"));

  if (name[0] == '~')
    {
      expanded = remake_tilde_expand (name);
      /* Only use the expansion if it produced something.  */
      if (expanded && expanded[0] != '\0')
        name = expanded;
    }

  /* This is also done in parse_file_seq, so this is redundant
     for names read from makefiles.  It is here for names passed
     on the command line.  */
  while (name[0] == '.' && name[1] == '/')
    {
      name += 2;
      while (name[0] == '/')
        /* Skip following slashes: ".//foo" is "foo", not "/foo".  */
        ++name;
    }

  if (name[0] == '\0')
    {
      /* Nothing else but one or more "./", maybe plus slashes!  */
      name = "./";
    }

  cp = strcache_add (name);

  free (expanded);

  return cp;
}

/* Toggle -d on receipt of SIGUSR1.  */

#ifdef SIGUSR1
static RETSIGTYPE
debug_signal_handler (int sig UNUSED)
{
  db_level = db_level ? DB_NONE : DB_BASIC;
}
#endif

/* Translate -d and the --debug=FLAGS strings into db_level bits.
   Each string may hold several single-letter flags separated by
   commas or spaces; only the first letter of each word is examined.
   Side effects: sets verify_flag when any debugging is on, and
   clears debug_flag again when the net level is zero (e.g. "n").  */
static void
decode_debug_flags (void)
{
  const char **pp;

  if (debug_flag)
    db_level = DB_ALL;

  if (db_flags)
    for (pp=db_flags->list; *pp; ++pp)
      {
        const char *p = *pp;

        while (1)
          {
            switch (tolower (p[0]))
              {
              case 'a':
                db_level |= DB_ALL;
                break;
              case 'b':
                db_level |= DB_BASIC;
                break;
              case 'i':
                db_level |= DB_BASIC | DB_IMPLICIT;
                break;
              case 'j':
                db_level |= DB_JOBS;
                break;
              case 'm':
                db_level |= DB_BASIC | DB_MAKEFILES;
                break;
              case 'n':
                /* 'n' (none) resets everything accumulated so far.  */
                db_level = 0;
                break;
              case 'v':
                db_level |= DB_BASIC | DB_VERBOSE;
                break;
              default:
                OS (fatal, NILF,
                    _("unknown debug level specification '%s'"), p);
              }

            /* Advance past the rest of this word to the next flag.  */
            while (*(++p) != '\0')
              if (*p == ',' || *p == ' ')
                {
                  ++p;
                  break;
                }

            if (*p == '\0')
              break;
          }
      }

  if (db_level)
    verify_flag = 1;

  if (! db_level)
    debug_flag = 0;
}

/* Translate the -O/--output-sync argument into the output_sync enum,
   and record the --sync-mutex handle when one was passed down.
   Dies fatally on an unrecognized TYPE.  */
static void
decode_output_sync_flags (void)
{
#ifdef NO_OUTPUT_SYNC
  output_sync = OUTPUT_SYNC_NONE;
#else
  if (output_sync_option)
    {
      if (streq (output_sync_option, "none"))
        output_sync = OUTPUT_SYNC_NONE;
      else if (streq (output_sync_option, "line"))
        output_sync = OUTPUT_SYNC_LINE;
      else if (streq (output_sync_option, "target"))
        output_sync = OUTPUT_SYNC_TARGET;
      else if (streq (output_sync_option, "recurse"))
        output_sync = OUTPUT_SYNC_RECURSE;
      else
        OS (fatal, NILF,
            _("unknown output-sync type '%s'"), output_sync_option);
    }

  if (sync_mutex)
    RECORD_SYNC_MUTEX (sync_mutex);
#endif
}

/* Translate --profile[=FORMAT] and --profile-directory=DIR into
   profile_flag and profile_directory.  An unrecognized FORMAT silently
   disables profiling.  A relative DIR is made absolute against
   starting_directory (and the absolutized copy is re-interned in
   profile_dir_opt so later uses see it too).  */
void
decode_profile_options(void)
{
  if (profile_option)
    {
      if (streq (profile_option, "callgrind"))
        profile_flag = PROFILE_CALLGRIND;
      else if (streq (profile_option, "json"))
        profile_flag = PROFILE_JSON;
      else
        profile_flag = PROFILE_DISABLED;
    }
  else
    {
      profile_flag = PROFILE_DISABLED;
    }

  if (profile_dir_opt == NULL)
    {
      profile_directory = starting_directory;
    }
  else
    {
      /* Use the last --profile-directory given.  */
      const char *dir = profile_dir_opt->list[profile_dir_opt->idx - 1];
      if (dir[0] != '/') {
        /* NOTE(review): fixed-size buffer + sprintf — could overflow if
           starting_directory plus dir exceeds GET_PATH_MAX; consider
           snprintf.  */
        char directory[GET_PATH_MAX];
        sprintf(directory, "%s/%s", starting_directory, dir);
        profile_dir_opt->list[profile_dir_opt->idx - 1] = strcache_add(directory);
      }
      profile_directory = profile_dir_opt->list[profile_dir_opt->idx - 1];
    }
}

#ifdef WINDOWS32

#ifndef NO_OUTPUT_SYNC

/* This is called from start_job_command when it detects that
   output_sync option is in effect.  The handle to the synchronization
   mutex is passed, as a string, to sub-makes via the --sync-mutex
   command-line argument.  */
void
prepare_mutex_handle_string (sync_handle_t handle)
{
  if (!sync_mutex)
    {
      /* Prepare the mutex handle string for our children.  */
      /* 2 hex digits per byte + 2 characters for "0x" + null.
*/\n sync_mutex = xmalloc ((2 * sizeof (sync_handle_t)) + 2 + 1);\n sprintf (sync_mutex, \"0x%Ix\", handle);\n define_makeflags (1, 0);\n }\n}\n\n#endif /* NO_OUTPUT_SYNC */\n\n/*\n * HANDLE runtime exceptions by avoiding a requestor on the GUI. Capture\n * exception and print it to stderr instead.\n *\n * If ! DB_VERBOSE, just print a simple message and exit.\n * If DB_VERBOSE, print a more verbose message.\n * If compiled for DEBUG, let exception pass through to GUI so that\n * debuggers can attach.\n */\nLONG WINAPI\nhandle_runtime_exceptions (struct _EXCEPTION_POINTERS *exinfo)\n{\n PEXCEPTION_RECORD exrec = exinfo->ExceptionRecord;\n LPSTR cmdline = GetCommandLine ();\n LPSTR prg = strtok (cmdline, \" \");\n CHAR errmsg[1024];\n#ifdef USE_EVENT_LOG\n HANDLE hEventSource;\n LPTSTR lpszStrings[1];\n#endif\n\n if (! ISDB (DB_VERBOSE))\n {\n sprintf (errmsg,\n _(\"%s: Interrupt/Exception caught (code = 0x%lx, addr = 0x%p)\\n\"),\n prg, exrec->ExceptionCode, exrec->ExceptionAddress);\n fprintf (stderr, errmsg);\n exit (255);\n }\n\n sprintf (errmsg,\n _(\"\\nUnhandled exception filter called from program %s\\nExceptionCode = %lx\\nExceptionFlags = %lx\\nExceptionAddress = 0x%p\\n\"),\n prg, exrec->ExceptionCode, exrec->ExceptionFlags,\n exrec->ExceptionAddress);\n\n if (exrec->ExceptionCode == EXCEPTION_ACCESS_VIOLATION\n && exrec->NumberParameters >= 2)\n sprintf (&errmsg[strlen(errmsg)],\n (exrec->ExceptionInformation[0]\n ? 
_(\"Access violation: write operation at address 0x%p\\n\")\n : _(\"Access violation: read operation at address 0x%p\\n\")),\n (PVOID)exrec->ExceptionInformation[1]);\n\n /* turn this on if we want to put stuff in the event log too */\n#ifdef USE_EVENT_LOG\n hEventSource = RegisterEventSource (NULL, \"GNU Make\");\n lpszStrings[0] = errmsg;\n\n if (hEventSource != NULL)\n {\n ReportEvent (hEventSource, /* handle of event source */\n EVENTLOG_ERROR_TYPE, /* event type */\n 0, /* event category */\n 0, /* event ID */\n NULL, /* current user's SID */\n 1, /* strings in lpszStrings */\n 0, /* no bytes of raw data */\n lpszStrings, /* array of error strings */\n NULL); /* no raw data */\n\n (VOID) DeregisterEventSource (hEventSource);\n }\n#endif\n\n /* Write the error to stderr too */\n fprintf (stderr, errmsg);\n\n#ifdef DEBUG\n return EXCEPTION_CONTINUE_SEARCH;\n#else\n exit (255);\n return (255); /* not reached */\n#endif\n}\n\n/*\n * On WIN32 systems we don't have the luxury of a /bin directory that\n * is mapped globally to every drive mounted to the system. Since make could\n * be invoked from any drive, and we don't want to propagate /bin/sh\n * to every single drive. Allow ourselves a chance to search for\n * a value for default shell here (if the default path does not exist).\n */\n\nint\nfind_and_set_default_shell (const char *token)\n{\n int sh_found = 0;\n char *atoken = 0;\n const char *search_token;\n const char *tokend;\n PATH_VAR(sh_path);\n extern const char *default_shell;\n\n if (!token)\n search_token = default_shell;\n else\n search_token = atoken = xstrdup (token);\n\n /* If the user explicitly requests the DOS cmd shell, obey that request.\n However, make sure that's what they really want by requiring the value\n of SHELL either equal, or have a final path element of, \"cmd\" or\n \"cmd.exe\" case-insensitive. 
*/\n tokend = search_token + strlen (search_token) - 3;\n if (((tokend == search_token\n || (tokend > search_token\n && (tokend[-1] == '/' || tokend[-1] == '\\\\')))\n && !strcasecmp (tokend, \"cmd\"))\n || ((tokend - 4 == search_token\n || (tokend - 4 > search_token\n && (tokend[-5] == '/' || tokend[-5] == '\\\\')))\n && !strcasecmp (tokend - 4, \"cmd.exe\")))\n {\n batch_mode_shell = 1;\n unixy_shell = 0;\n sprintf (sh_path, \"%s\", search_token);\n default_shell = xstrdup (w32ify (sh_path, 0));\n DB (DB_VERBOSE, (_(\"find_and_set_shell() setting default_shell = %s\\n\"),\n default_shell));\n sh_found = 1;\n }\n else if (!no_default_sh_exe\n && (token == NULL || !strcmp (search_token, default_shell)))\n {\n /* no new information, path already set or known */\n sh_found = 1;\n }\n else if (_access (search_token, 0) == 0)\n {\n /* search token path was found */\n sprintf (sh_path, \"%s\", search_token);\n default_shell = xstrdup (w32ify (sh_path, 0));\n DB (DB_VERBOSE, (_(\"find_and_set_shell() setting default_shell = %s\\n\"),\n default_shell));\n sh_found = 1;\n }\n else\n {\n char *p;\n struct variable *v = lookup_variable (STRING_SIZE_TUPLE (\"PATH\"));\n\n /* Search Path for shell */\n if (v && v->value)\n {\n char *ep;\n\n p = v->value;\n ep = strchr (p, PATH_SEPARATOR_CHAR);\n\n while (ep && *ep)\n {\n *ep = '\\0';\n\n sprintf (sh_path, \"%s/%s\", p, search_token);\n if (_access (sh_path, 0) == 0)\n {\n default_shell = xstrdup (w32ify (sh_path, 0));\n sh_found = 1;\n *ep = PATH_SEPARATOR_CHAR;\n\n /* terminate loop */\n p += strlen (p);\n }\n else\n {\n *ep = PATH_SEPARATOR_CHAR;\n p = ++ep;\n }\n\n ep = strchr (p, PATH_SEPARATOR_CHAR);\n }\n\n /* be sure to check last element of Path */\n if (p && *p)\n {\n sprintf (sh_path, \"%s/%s\", p, search_token);\n if (_access (sh_path, 0) == 0)\n {\n default_shell = xstrdup (w32ify (sh_path, 0));\n sh_found = 1;\n }\n }\n\n if (sh_found)\n DB (DB_VERBOSE,\n (_(\"find_and_set_shell() path search set default_shell = 
%s\\n\"),\n default_shell));\n }\n }\n\n /* naive test */\n if (!unixy_shell && sh_found\n && (strstr (default_shell, \"sh\") || strstr (default_shell, \"SH\")))\n {\n unixy_shell = 1;\n batch_mode_shell = 0;\n }\n\n#ifdef BATCH_MODE_ONLY_SHELL\n batch_mode_shell = 1;\n#endif\n\n free (atoken);\n\n return (sh_found);\n}\n#endif /* WINDOWS32 */\n\n#ifdef __MSDOS__\nstatic void\nmsdos_return_to_initial_directory (void)\n{\n if (directory_before_chdir)\n chdir (directory_before_chdir);\n}\n#endif /* __MSDOS__ */\n\nstatic void\nreset_jobserver (void)\n{\n jobserver_clear ();\n free (jobserver_auth);\n jobserver_auth = NULL;\n}\n\nint\nmain (int argc, const char **argv, char **envp)\n{\n static char *stdin_nm = 0;\n int makefile_status = MAKE_SUCCESS;\n PATH_VAR (current_directory);\n unsigned int restarts = 0;\n unsigned int syncing = 0;\n int argv_slots;\n#ifdef WINDOWS32\n const char *unix_path = NULL;\n const char *windows32_path = NULL;\n\n SetUnhandledExceptionFilter (handle_runtime_exceptions);\n\n /* start off assuming we have no shell */\n unixy_shell = 0;\n no_default_sh_exe = 1;\n#endif\n\n /* Useful for attaching debuggers, etc. */\n#ifdef SPIN\n SPIN (\"main-entry\");\n#endif\n\n argv0 = strdup(argv[0]);\n output_init (&make_sync);\n\n initialize_stopchar_map();\n\n#ifdef SET_STACK_SIZE\n /* Get rid of any avoidable limit on stack size. */\n {\n struct rlimit rlim;\n\n /* Set the stack limit huge so that alloca does not fail. */\n if (getrlimit (RLIMIT_STACK, &rlim) == 0\n && rlim.rlim_cur > 0 && rlim.rlim_cur < rlim.rlim_max)\n {\n stack_limit = rlim;\n rlim.rlim_cur = rlim.rlim_max;\n setrlimit (RLIMIT_STACK, &rlim);\n }\n else\n stack_limit.rlim_cur = 0;\n }\n#endif\n\n global_argv = argv;\n /* Needed for OS/2 */\n initialize_main (&argc, &argv);\n\n#ifdef MAKE_MAINTAINER_MODE\n /* In maintainer mode we always enable verification. 
*/\n verify_flag = 1;\n#endif\n\n#if defined (__MSDOS__) && !defined (_POSIX_SOURCE)\n /* Request the most powerful version of 'system', to\n make up for the dumb default shell. */\n __system_flags = (__system_redirect\n | __system_use_shell\n | __system_allow_multiple_cmds\n | __system_allow_long_cmds\n | __system_handle_null_commands\n | __system_emulate_chdir);\n\n#endif\n\n /* Set up gettext/internationalization support. */\n setlocale (LC_ALL, \"\");\n /* The cast to void shuts up compiler warnings on systems that\n disable NLS. */\n (void)bindtextdomain (PACKAGE, LOCALEDIR);\n (void)textdomain (PACKAGE);\n\n#ifdef POSIX\n sigemptyset (&fatal_signal_set);\n#define ADD_SIG(sig) sigaddset (&fatal_signal_set, sig)\n#else\n#ifdef HAVE_SIGSETMASK\n fatal_signal_mask = 0;\n#define ADD_SIG(sig) fatal_signal_mask |= sigmask (sig)\n#else\n#define ADD_SIG(sig) (void)sig\n#endif\n#endif\n\n#define FATAL_SIG(sig) \\\n if (bsd_signal (sig, fatal_error_signal) == SIG_IGN) \\\n bsd_signal (sig, SIG_IGN); \\\n else \\\n ADD_SIG (sig);\n\n#ifdef SIGHUP\n FATAL_SIG (SIGHUP);\n#endif\n#ifdef SIGQUIT\n FATAL_SIG (SIGQUIT);\n#endif\n FATAL_SIG (SIGINT);\n FATAL_SIG (SIGTERM);\n\n#ifdef __MSDOS__\n /* Windows 9X delivers FP exceptions in child programs to their\n parent! We don't want Make to die when a child divides by zero,\n so we work around that lossage by catching SIGFPE. */\n FATAL_SIG (SIGFPE);\n#endif\n\n#ifdef SIGDANGER\n FATAL_SIG (SIGDANGER);\n#endif\n#ifdef SIGXCPU\n FATAL_SIG (SIGXCPU);\n#endif\n#ifdef SIGXFSZ\n FATAL_SIG (SIGXFSZ);\n#endif\n\n#undef FATAL_SIG\n\n /* Do not ignore the child-death signal. This must be done before\n any children could possibly be created; otherwise, the wait\n functions won't work on systems with the SVR4 ECHILD brain\n damage, if our invoker is ignoring this signal. 
*/\n\n#ifdef HAVE_WAIT_NOHANG\n# if defined SIGCHLD\n (void) bsd_signal (SIGCHLD, SIG_DFL);\n# endif\n# if defined SIGCLD && SIGCLD != SIGCHLD\n (void) bsd_signal (SIGCLD, SIG_DFL);\n# endif\n#endif\n\n output_init (NULL);\n\n /* Figure out where this program lives. */\n\n if (argv[0] == 0)\n argv[0] = (char *)\"\";\n if (argv[0][0] == '\\0')\n program = \"make\";\n else\n {\n#if defined(HAVE_DOS_PATHS)\n const char* start = argv[0];\n\n /* Skip an initial drive specifier if present. */\n if (isalpha ((unsigned char)start[0]) && start[1] == ':')\n start += 2;\n\n if (start[0] == '\\0')\n program = \"make\";\n else\n {\n program = start + strlen (start);\n while (program > start && ! STOP_SET (program[-1], MAP_DIRSEP))\n --program;\n\n /* Remove the .exe extension if present. */\n {\n size_t len = strlen (program);\n if (len > 4 && streq (&program[len - 4], \".exe\"))\n program = xstrndup (program, len - 4);\n }\n }\n#else\n program = strrchr (argv[0], '/');\n if (program == 0)\n program = argv[0];\n else\n ++program;\n#endif\n }\n\n /* Set up to access user data (files). */\n user_access ();\n\n initialize_global_hash_tables ();\n\n /* Figure out where we are. */\n\n#ifdef WINDOWS32\n if (getcwd_fs (current_directory, GET_PATH_MAX) == 0)\n#else\n if (getcwd (current_directory, GET_PATH_MAX) == 0)\n#endif\n {\n#ifdef HAVE_GETCWD\n perror_with_name (\"getcwd\", \"\");\n#else\n OS (error, NILF, \"getwd: %s\", current_directory);\n#endif\n current_directory[0] = '\\0';\n directory_before_chdir = 0;\n }\n else\n directory_before_chdir = xstrdup (current_directory);\n\n#ifdef __MSDOS__\n /* Make sure we will return to the initial directory, come what may. */\n atexit (msdos_return_to_initial_directory);\n#endif\n\n /* Initialize the special variables. 
*/\n define_variable_cname (\".VARIABLES\", \"\", o_default, 0)->special = 1;\n /* define_variable_cname (\".TARGETS\", \"\", o_default, 0)->special = 1; */\n define_variable_cname (\".RECIPEPREFIX\", \"\", o_default, 0)->special = 1;\n define_variable_cname (\".SHELLFLAGS\", \"-c\", o_default, 0);\n define_variable_cname (\".LOADED\", \"\", o_default, 0);\n\n /* Set up .FEATURES\n Use a separate variable because define_variable_cname() is a macro and\n some compilers (MSVC) don't like conditionals in macros. */\n {\n const char *features = \"target-specific order-only second-expansion\"\n \" else-if shortest-stem undefine oneshell nocomment\"\n \" grouped-target extra-prereqs\"\n#ifndef NO_ARCHIVES\n \" archives\"\n#endif\n#ifdef MAKE_JOBSERVER\n \" jobserver\"\n#endif\n#ifndef NO_OUTPUT_SYNC\n \" output-sync\"\n#endif\n#ifdef MAKE_SYMLINKS\n \" check-symlink\"\n#endif\n#ifdef HAVE_GUILE\n \" guile\"\n#endif\n#ifdef MAKE_LOAD\n \" load\"\n#endif\n#ifdef MAKE_MAINTAINER_MODE\n \" maintainer\"\n#endif\n ;\n\n define_variable_cname (\".FEATURES\", features, o_default, 0);\n }\n\n /* Configure GNU Guile support */\n guile_gmake_setup (NILF);\n\n /* Read in variables from the environment. It is important that this be\n done before $(MAKE) is figured out so its definitions will not be\n from the environment. */\n\n {\n unsigned int i;\n\n for (i = 0; envp[i] != 0; ++i)\n {\n struct variable *v;\n const char *ep = envp[i];\n /* By default, export all variables culled from the environment. */\n enum variable_export export = v_export;\n size_t len;\n\n while (! STOP_SET (*ep, MAP_EQUALS))\n ++ep;\n\n /* If there's no equals sign it's a malformed environment. Ignore. */\n if (*ep == '\\0')\n continue;\n\n /* Length of the variable name, and skip the '='. */\n len = ep++ - envp[i];\n\n /* If this is MAKE_RESTARTS, check to see if the \"already printed\n the enter statement\" flag is set. 
*/\n if (len == 13 && strneq (envp[i], \"MAKE_RESTARTS\", 13))\n {\n if (*ep == '-')\n {\n OUTPUT_TRACED ();\n ++ep;\n }\n restarts = (unsigned int) atoi (ep);\n export = v_noexport;\n }\n\n v = define_variable (envp[i], len, ep, o_env, 1);\n\n /* POSIX says the value of SHELL set in the makefile won't change the\n value of SHELL given to subprocesses. */\n if (streq (v->name, \"SHELL\"))\n {\n export = v_noexport;\n shell_var.name = xstrdup (\"SHELL\");\n shell_var.length = 5;\n shell_var.value = xstrdup (ep);\n }\n\n v->export = export;\n }\n }\n\n /* Decode the switches. */\n decode_env_switches (STRING_SIZE_TUPLE (\"GNUMAKEFLAGS\"));\n\n /* Clear GNUMAKEFLAGS to avoid duplication. */\n define_variable_cname (\"GNUMAKEFLAGS\", \"\", o_env, 0);\n\n decode_env_switches (STRING_SIZE_TUPLE (\"MAKEFLAGS\"));\n\n#if 0\n /* People write things like:\n MFLAGS=\"CC=gcc -pipe\" \"CFLAGS=-g\"\n and we set the -p, -i and -e switches. Doesn't seem quite right. */\n decode_env_switches (STRING_SIZE_TUPLE (\"MFLAGS\"));\n#endif\n\n /* In output sync mode we need to sync any output generated by reading the\n makefiles, such as in $(info ...) or stderr from $(shell ...) etc. */\n\n syncing = make_sync.syncout = (output_sync == OUTPUT_SYNC_LINE\n || output_sync == OUTPUT_SYNC_TARGET);\n OUTPUT_SET (&make_sync);\n\n /* Parse the command line options. Remember the job slots set this way. */\n {\n int env_slots = arg_job_slots;\n arg_job_slots = INVALID_JOB_SLOTS;\n\n decode_switches (argc, (const char **)argv, 0);\n argv_slots = arg_job_slots;\n\n if (arg_job_slots == INVALID_JOB_SLOTS)\n arg_job_slots = env_slots;\n }\n\n /* Set a variable specifying whether stdout/stdin is hooked to a TTY. */\n#ifdef HAVE_ISATTY\n if (isatty (fileno (stdout)))\n if (! lookup_variable (STRING_SIZE_TUPLE (\"MAKE_TERMOUT\")))\n {\n const char *tty = TTYNAME (fileno (stdout));\n define_variable_cname (\"MAKE_TERMOUT\", tty ? 
tty : DEFAULT_TTYNAME,\n o_default, 0)->export = v_export;\n }\n if (isatty (fileno (stderr)))\n if (! lookup_variable (STRING_SIZE_TUPLE (\"MAKE_TERMERR\")))\n {\n const char *tty = TTYNAME (fileno (stderr));\n define_variable_cname (\"MAKE_TERMERR\", tty ? tty : DEFAULT_TTYNAME,\n o_default, 0)->export = v_export;\n }\n#endif\n\n /* Reset in case the switches changed our minds. */\n syncing = (output_sync == OUTPUT_SYNC_LINE\n || output_sync == OUTPUT_SYNC_TARGET);\n\n if (make_sync.syncout && ! syncing)\n output_close (&make_sync);\n\n make_sync.syncout = syncing;\n OUTPUT_SET (&make_sync);\n\n /* Figure out the level of recursion. */\n {\n struct variable *v = lookup_variable (STRING_SIZE_TUPLE (MAKELEVEL_NAME));\n if (v && v->value[0] != '\\0' && v->value[0] != '-')\n makelevel = (unsigned int) atoi (v->value);\n else\n makelevel = 0;\n\n v = lookup_variable (STRING_SIZE_TUPLE (MAKEPARENT_PID_NAME));\n if (v && v->value[0] != '\\0' && v->value[0] != '-')\n makeparent_pid = (pid_t) atoi (v->value);\n else\n makeparent_pid = (pid_t)0;\n\n v = lookup_variable (STRING_SIZE_TUPLE (MAKEPARENT_TARGET_NAME));\n if (v && v->value[0] != '\\0' && v->value[0] != '-') {\n makeparent_target = v->value;\n } else {\n makeparent_target = NULL;\n }\n }\n\n decode_trace_flags (tracing_opts);\n decode_verbosity_flags (verbosity_opts);\n\n /* FIXME: put into a subroutine like decode_trace_flags */\n if (post_mortem_flag) {\n debugger_on_error |= (DEBUGGER_ON_ERROR|DEBUGGER_ON_FATAL);\n debugger_enabled = 1;\n } else if (debugger_flag) {\n b_debugger_preread = false;\n job_slots = 1;\n i_debugger_stepping = 1;\n i_debugger_nexting = 0;\n debugger_enabled = 1;\n /* For now we'll do basic debugging. 
Later, \"stepping'\n will stop here while next won't - either way no printing.\n */\n db_level |= DB_BASIC | DB_CALL | DB_SHELL | DB_UPDATE_GOAL\n | DB_MAKEFILES;\n }\n /* debugging sets some things */\n if (debugger_opts) {\n const char **p;\n b_show_version = true;\n for (p = debugger_opts->list; *p != 0; ++p)\n {\n if (0 == strcmp(*p, \"preread\")) {\n b_debugger_preread = true;\n db_level |= DB_READ_MAKEFILES;\n }\n\n if (0 == strcmp(*p, \"goal\")) {\n b_debugger_goal = true;\n db_level |= DB_UPDATE_GOAL;\n }\n\n if ( 0 == strcmp(*p, \"full\") || b_debugger_preread || b_debugger_goal\n || 0 == strcmp(*p, \"preaction\") ) {\n job_slots = 1;\n i_debugger_stepping = 1;\n i_debugger_nexting = 0;\n debugger_enabled = 1;\n /* For now we'll do basic debugging. Later, \"stepping'\n will stop here while next won't - either way no printing.\n */\n db_level |= DB_BASIC | DB_CALL | DB_UPDATE_GOAL\n | b_debugger_goal ? 0 : DB_SHELL\n | DB_MAKEFILES;\n }\n if ( 0 == strcmp(*p, \"full\") || b_debugger_goal\n || 0 == strcmp(*p, \"error\") ) {\n debugger_on_error |= (DEBUGGER_ON_ERROR|DEBUGGER_ON_FATAL);\n } else if ( 0 == strcmp(*p, \"fatal\") ) {\n debugger_on_error |= DEBUGGER_ON_FATAL;\n }\n }\n#ifndef HAVE_LIBREADLINE\n O (error, NILF,\n \"warning: you specified a debugger option, but you don't have readline support\");\n O (error, NILF,\n \"debugger support compiled in. Debugger options will be ignored.\");\n#endif\n }\n\n /* Set always_make_flag if -B was given and we've not restarted already. */\n always_make_flag = always_make_set && (restarts == 0);\n\n /* Print version information, and exit. 
*/\n if (print_version_flag)\n {\n print_version ();\n die (MAKE_SUCCESS);\n }\n\n if (ISDB (DB_BASIC) && makelevel == 0 && b_show_version)\n print_version ();\n\n /* Set the \"MAKE_COMMAND\" variable to the name we were invoked with.\n (If it is a relative pathname with a slash, prepend our directory name\n so the result will run the same program regardless of the current dir.\n If it is a name with no slash, we can only hope that PATH did not\n find it in the current directory.) */\n if (current_directory[0] != '\\0'\n && argv[0] != 0 && argv[0][0] != '/' && strchr (argv[0], '/') != 0\n )\n argv[0] = xstrdup (concat (3, current_directory, \"/\", argv[0]));\n\n /* We may move, but until we do, here we are. */\n starting_directory = current_directory;\n\n /* Update profile global options from cli options */\n decode_profile_options();\n if (profile_flag) profile_init(PACKAGE_TARNAME \" \" PACKAGE_VERSION, argv, arg_job_slots);\n\n /* Validate the arg_job_slots configuration before we define MAKEFLAGS so\n users get an accurate value in their makefiles.\n At this point arg_job_slots is the argv setting, if there is one, else\n the MAKEFLAGS env setting, if there is one. */\n\n if (jobserver_auth)\n {\n /* We're a child in an existing jobserver group. */\n if (argv_slots == INVALID_JOB_SLOTS)\n {\n /* There's no -j option on the command line: check authorization. */\n if (jobserver_parse_auth (jobserver_auth))\n {\n /* Success! Use the jobserver. */\n goto job_setup_complete;\n }\n\n /* Oops: we have jobserver-auth but it's invalid :(. */\n O (error, NILF, _(\"warning: jobserver unavailable: using -j1. Add '+' to parent make rule.\"));\n arg_job_slots = 1;\n }\n\n /* The user provided a -j setting on the command line so use it: we're\n the master make of a new jobserver group. */\n else if (!restarts)\n ON (error, NILF,\n _(\"warning: -j%d forced in submake: resetting jobserver mode.\"),\n argv_slots);\n\n /* We can't use our parent's jobserver, so reset. 
*/\n reset_jobserver ();\n }\n\n job_setup_complete:\n\n /* The extra indirection through $(MAKE_COMMAND) is done\n for hysterical raisins. */\n\n define_variable_cname (\"MAKE_COMMAND\", argv[0], o_default, 0);\n define_variable_cname (\"MAKE\", \"$(MAKE_COMMAND)\", o_default, 1);\n\n if (command_variables != 0)\n {\n struct command_variable *cv;\n struct variable *v;\n size_t len = 0;\n char *value, *p;\n\n /* Figure out how much space will be taken up by the command-line\n variable definitions. */\n for (cv = command_variables; cv != 0; cv = cv->next)\n {\n v = cv->variable;\n len += 2 * strlen (v->name);\n if (! v->recursive)\n ++len;\n ++len;\n len += 2 * strlen (v->value);\n ++len;\n }\n\n /* Now allocate a buffer big enough and fill it. */\n p = value = alloca (len);\n for (cv = command_variables; cv != 0; cv = cv->next)\n {\n v = cv->variable;\n p = quote_for_env (p, v->name);\n if (! v->recursive)\n *p++ = ':';\n *p++ = '=';\n p = quote_for_env (p, v->value);\n *p++ = ' ';\n }\n p[-1] = '\\0'; /* Kill the final space and terminate. */\n\n /* Define an unchangeable variable with a name that no POSIX.2\n makefile could validly use for its own variable. */\n define_variable_cname (\"-*-command-variables-*-\", value, o_automatic, 0);\n\n /* Define the variable; this will not override any user definition.\n Normally a reference to this variable is written into the value of\n MAKEFLAGS, allowing the user to override this value to affect the\n exported value of MAKEFLAGS. In POSIX-pedantic mode, we cannot\n allow the user's setting of MAKEOVERRIDES to affect MAKEFLAGS, so\n a reference to this hidden variable is written instead. */\n define_variable_cname (\"MAKEOVERRIDES\", \"${-*-command-variables-*-}\",\n o_env, 1);\n }\n\n /* If there were -C flags, move ourselves about. 
*/\n if (directories != 0)\n {\n unsigned int i;\n for (i = 0; directories->list[i] != 0; ++i)\n {\n const char *dir = directories->list[i];\n if (chdir (dir) < 0)\n pfatal_with_name (dir);\n }\n }\n\n /* Except under -s, always do -w in sub-makes and under -C. */\n if (!silent_flag && (directories != 0 || makelevel > 0))\n print_directory = 1;\n\n /* Let the user disable that with --no-print-directory. */\n if (inhibit_print_directory)\n print_directory = 0;\n\n /* If -R was given, set -r too (doesn't make sense otherwise!) */\n if (no_builtin_variables_flag)\n no_builtin_rules_flag = 1;\n\n /* Construct the list of include directories to search. */\n\n construct_include_path (include_directories == 0\n ? 0 : include_directories->list);\n\n /* If we chdir'ed, figure out where we are now. */\n if (directories)\n {\n#ifdef WINDOWS32\n if (getcwd_fs (current_directory, GET_PATH_MAX) == 0)\n#else\n if (getcwd (current_directory, GET_PATH_MAX) == 0)\n#endif\n {\n#ifdef HAVE_GETCWD\n perror_with_name (\"getcwd\", \"\");\n#else\n OS (error, NILF, \"getwd: %s\", current_directory);\n#endif\n starting_directory = 0;\n }\n else\n starting_directory = current_directory;\n }\n\n define_variable_cname (\"CURDIR\", current_directory, o_file, 0);\n\n /* Read any stdin makefiles into temporary files. */\n\n if (makefiles != 0)\n {\n unsigned int i;\n for (i = 0; i < makefiles->idx; ++i)\n if (makefiles->list[i][0] == '-' && makefiles->list[i][1] == '\\0')\n {\n /* This makefile is standard input. Since we may re-exec\n and thus re-read the makefiles, we read standard input\n into a temporary file and read from that. 
*/\n FILE *outfile;\n char *template;\n const char *tmpdir;\n\n if (stdin_nm)\n O (fatal, NILF,\n _(\"Makefile from standard input specified twice.\"));\n\n#ifdef P_tmpdir\n# define DEFAULT_TMPDIR P_tmpdir\n#else\n# define DEFAULT_TMPDIR \"/tmp\"\n#endif\n#define DEFAULT_TMPFILE \"GmXXXXXX\"\n\n if (((tmpdir = getenv (\"TMPDIR\")) == NULL || *tmpdir == '\\0')\n )\n tmpdir = DEFAULT_TMPDIR;\n\n template = alloca (strlen (tmpdir) + CSTRLEN (DEFAULT_TMPFILE) + 2);\n strcpy (template, tmpdir);\n\n#ifdef HAVE_DOS_PATHS\n if (strchr (\"/\\\\\", template[strlen (template) - 1]) == NULL)\n strcat (template, \"/\");\n#else\n# ifndef VMS\n if (template[strlen (template) - 1] != '/')\n strcat (template, \"/\");\n# endif /* !VMS */\n#endif /* !HAVE_DOS_PATHS */\n\n strcat (template, DEFAULT_TMPFILE);\n outfile = get_tmpfile (&stdin_nm, template);\n if (outfile == 0)\n pfatal_with_name (_(\"fopen (temporary file)\"));\n while (!feof (stdin) && ! ferror (stdin))\n {\n char buf[2048];\n size_t n = fread (buf, 1, sizeof (buf), stdin);\n if (n > 0 && fwrite (buf, 1, n, outfile) != n)\n pfatal_with_name (_(\"fwrite (temporary file)\"));\n }\n fclose (outfile);\n\n /* Replace the name that read_all_makefiles will\n see with the name of the temporary file. */\n makefiles->list[i] = strcache_add (stdin_nm);\n\n /* Make sure the temporary file will not be remade. */\n {\n struct file *f = enter_file (strcache_add (stdin_nm));\n f->updated = 1;\n f->update_status = us_success;\n f->command_state = cs_finished;\n /* Can't be intermediate, or it'll be removed too early for\n make re-exec. */\n f->intermediate = 0;\n f->dontcare = 0;\n }\n }\n }\n\n#ifndef __EMX__ /* Don't use a SIGCHLD handler for OS/2 */\n#if !defined(HAVE_WAIT_NOHANG) || defined(MAKE_JOBSERVER)\n /* Set up to handle children dying. 
This must be done before\n reading in the makefiles so that 'shell' function calls will work.\n\n If we don't have a hanging wait we have to fall back to old, broken\n functionality here and rely on the signal handler and counting\n children.\n\n If we're using the jobs pipe we need a signal handler so that SIGCHLD is\n not ignored; we need it to interrupt the read(2) of the jobserver pipe if\n we're waiting for a token.\n\n If none of these are true, we don't need a signal handler at all. */\n {\n# if defined SIGCHLD\n bsd_signal (SIGCHLD, child_handler);\n# endif\n# if defined SIGCLD && SIGCLD != SIGCHLD\n bsd_signal (SIGCLD, child_handler);\n# endif\n }\n\n#ifdef HAVE_PSELECT\n /* If we have pselect() then we need to block SIGCHLD so it's deferred. */\n {\n sigset_t block;\n sigemptyset (&block);\n sigaddset (&block, SIGCHLD);\n if (sigprocmask (SIG_SETMASK, &block, NULL) < 0)\n pfatal_with_name (\"sigprocmask(SIG_SETMASK, SIGCHLD)\");\n }\n#endif\n\n#endif\n#endif\n\n /* Let the user send us SIGUSR1 to toggle the -d flag during the run. */\n#ifdef SIGUSR1\n bsd_signal (SIGUSR1, debug_signal_handler);\n#endif\n\n /* Define the initial list of suffixes for old-style rules. */\n set_default_suffixes ();\n\n /* Define the file rules for the built-in suffix rules. These will later\n be converted into pattern rules. We used to do this in\n install_default_implicit_rules, but since that happens after reading\n makefiles, it results in the built-in pattern rules taking precedence\n over makefile-specified suffix rules, which is wrong. */\n install_default_suffix_rules ();\n\n /* Define some internal and special variables. */\n define_automatic_variables ();\n\n /* Set up the MAKEFLAGS and MFLAGS variables for makefiles to see.\n Initialize it to be exported but allow the makefile to reset it. */\n define_makeflags (0, 0)->export = v_export;\n\n /* Define the default variables. 
*/\n define_default_variables ();\n\n default_file = enter_file (strcache_add (\".DEFAULT\"));\n\n default_goal_var = define_variable_cname (\".DEFAULT_GOAL\", \"\", o_file, 0);\n\n /* Evaluate all strings provided with --eval.\n Also set up the $(-*-eval-flags-*-) variable. */\n\n if (eval_strings)\n {\n char *p, *value;\n unsigned int i;\n size_t len = (CSTRLEN (\"--eval=\") + 1) * eval_strings->idx;\n\n for (i = 0; i < eval_strings->idx; ++i)\n {\n p = xstrdup (eval_strings->list[i]);\n len += 2 * strlen (p);\n eval_buffer (p, NULL);\n free (p);\n }\n\n p = value = alloca (len);\n for (i = 0; i < eval_strings->idx; ++i)\n {\n strcpy (p, \"--eval=\");\n p += CSTRLEN (\"--eval=\");\n p = quote_for_env (p, eval_strings->list[i]);\n *(p++) = ' ';\n }\n p[-1] = '\\0';\n\n define_variable_cname (\"-*-eval-flags-*-\", value, o_automatic, 0);\n }\n\n /* Read all the makefiles. */\n\n read_makefiles = read_all_makefiles (makefiles == 0 ? 0 : makefiles->list);\n\n#ifdef WINDOWS32\n /* look one last time after reading all Makefiles */\n if (no_default_sh_exe)\n no_default_sh_exe = !find_and_set_default_shell (NULL);\n#endif /* WINDOWS32 */\n\n#if defined (__MSDOS__) || defined (__EMX__) || defined (VMS)\n /* We need to know what kind of shell we will be using. */\n {\n extern int _is_unixy_shell (const char *_path);\n struct variable *shv = lookup_variable (STRING_SIZE_TUPLE (\"SHELL\"));\n extern int unixy_shell;\n extern const char *default_shell;\n\n if (shv && *shv->value)\n {\n char *shell_path = recursively_expand (shv);\n\n if (shell_path && _is_unixy_shell (shell_path))\n unixy_shell = 1;\n else\n unixy_shell = 0;\n if (shell_path)\n default_shell = shell_path;\n }\n }\n#endif /* __MSDOS__ || __EMX__ */\n\n {\n int old_builtin_rules_flag = no_builtin_rules_flag;\n int old_builtin_variables_flag = no_builtin_variables_flag;\n int old_arg_job_slots = arg_job_slots;\n\n arg_job_slots = INVALID_JOB_SLOTS;\n\n /* Decode switches again, for variables set by the makefile. 
*/\n decode_env_switches (STRING_SIZE_TUPLE (\"GNUMAKEFLAGS\"));\n\n /* Clear GNUMAKEFLAGS to avoid duplication. */\n define_variable_cname (\"GNUMAKEFLAGS\", \"\", o_override, 0);\n\n decode_env_switches (STRING_SIZE_TUPLE (\"MAKEFLAGS\"));\n#if 0\n decode_env_switches (STRING_SIZE_TUPLE (\"MFLAGS\"));\n#endif\n\n /* If -j is not set in the makefile, or it was set on the command line,\n reset to use the previous value. */\n if (arg_job_slots == INVALID_JOB_SLOTS || argv_slots != INVALID_JOB_SLOTS)\n arg_job_slots = old_arg_job_slots;\n\n else if (jobserver_auth)\n {\n /* Makefile MAKEFLAGS set -j, but we already have a jobserver.\n Make us the master of a new jobserver group. */\n if (!restarts)\n ON (error, NILF,\n _(\"warning: -j%d forced in makefile: resetting jobserver mode.\"),\n arg_job_slots);\n\n /* We can't use our parent's jobserver, so reset. */\n reset_jobserver ();\n }\n\n /* Reset in case the switches changed our mind. */\n syncing = (output_sync == OUTPUT_SYNC_LINE\n || output_sync == OUTPUT_SYNC_TARGET);\n\n if (make_sync.syncout && ! syncing)\n output_close (&make_sync);\n\n make_sync.syncout = syncing;\n OUTPUT_SET (&make_sync);\n\n /* If we've disabled builtin rules, get rid of them. */\n if (no_builtin_rules_flag && ! old_builtin_rules_flag)\n {\n if (suffix_file->builtin)\n {\n free_dep_chain (suffix_file->deps);\n suffix_file->deps = 0;\n }\n define_variable_cname (\"SUFFIXES\", \"\", o_default, 0);\n }\n\n /* If we've disabled builtin variables, get rid of them. */\n if (no_builtin_variables_flag && ! old_builtin_variables_flag)\n undefine_default_variables ();\n }\n\n /* Final jobserver configuration.\n\n If we have jobserver_auth then we are a client in an existing jobserver\n group, that's already been verified OK above. 
If we don't have\n jobserver_auth and jobserver is enabled, then start a new jobserver.\n\n arg_job_slots = INVALID_JOB_SLOTS if we don't want -j in MAKEFLAGS\n\n arg_job_slots = # of jobs of parallelism\n\n job_slots = 0 for no limits on jobs, or when limiting via jobserver.\n\n job_slots = 1 for standard non-parallel mode.\n\n job_slots >1 for old-style parallelism without jobservers. */\n\n if (jobserver_auth)\n job_slots = 0;\n else if (arg_job_slots == INVALID_JOB_SLOTS)\n job_slots = 1;\n else\n job_slots = arg_job_slots;\n\n /* If we have >1 slot at this point, then we're a top-level make.\n Set up the jobserver.\n\n Every make assumes that it always has one job it can run. For the\n submakes it's the token they were given by their parent. For the top\n make, we just subtract one from the number the user wants. */\n\n if (job_slots > 1 && jobserver_setup (job_slots - 1))\n {\n /* Fill in the jobserver_auth for our children. */\n jobserver_auth = jobserver_get_auth ();\n\n if (jobserver_auth)\n {\n /* We're using the jobserver so set job_slots to 0. */\n master_job_slots = job_slots;\n job_slots = 0;\n }\n }\n\n /* If we're not using parallel jobs, then we don't need output sync.\n This is so people can enable output sync in GNUMAKEFLAGS or similar, but\n not have it take effect unless parallel builds are enabled. */\n if (syncing && job_slots == 1)\n {\n OUTPUT_UNSET ();\n output_close (&make_sync);\n syncing = 0;\n output_sync = OUTPUT_SYNC_NONE;\n }\n\n#ifndef MAKE_SYMLINKS\n if (check_symlink_flag)\n {\n O (error, NILF, _(\"Symbolic links not supported: disabling -L.\"));\n check_symlink_flag = 0;\n }\n#endif\n\n /* Set up MAKEFLAGS and MFLAGS again, so they will be right. */\n\n define_makeflags (1, 0);\n\n /* Make each 'struct goaldep' point at the 'struct file' for the file\n depended on. Also do magic for special targets. */\n\n snap_deps ();\n\n /* Convert old-style suffix rules to pattern rules. 
It is important to\n do this before installing the built-in pattern rules below, so that\n makefile-specified suffix rules take precedence over built-in pattern\n rules. */\n\n convert_to_pattern ();\n\n /* Install the default implicit pattern rules.\n This used to be done before reading the makefiles.\n But in that case, built-in pattern rules were in the chain\n before user-defined ones, so they matched first. */\n\n install_default_implicit_rules ();\n\n /* Compute implicit rule limits and do magic for pattern rules. */\n\n snap_implicit_rules ();\n\n /* Construct the listings of directories in VPATH lists. */\n\n build_vpath_lists ();\n\n /* Mark files given with -o flags as very old and as having been updated\n already, and files given with -W flags as brand new (time-stamp as far\n as possible into the future). If restarts is set we'll do -W later. */\n\n if (old_files != 0)\n {\n const char **p;\n for (p = old_files->list; *p != 0; ++p)\n {\n struct file *f = enter_file (*p);\n f->last_mtime = f->mtime_before_update = OLD_MTIME;\n f->updated = 1;\n f->update_status = us_success;\n f->command_state = cs_finished;\n }\n }\n\n if (!restarts && new_files != 0)\n {\n const char **p;\n for (p = new_files->list; *p != 0; ++p)\n {\n struct file *f = enter_file (*p);\n f->last_mtime = f->mtime_before_update = NEW_MTIME;\n }\n }\n\n /* Initialize the remote job module. */\n remote_setup ();\n\n /* Dump any output we've collected. */\n\n OUTPUT_UNSET ();\n output_close (&make_sync);\n\n if (read_makefiles)\n {\n /* Update any makefiles if necessary. 
*/\n\n FILE_TIMESTAMP *makefile_mtimes;\n char **aargv = NULL;\n const char **nargv;\n int nargc;\n enum update_status status;\n\n DB (DB_BASIC, (_(\"Updating makefiles...\\n\")));\n\n {\n struct goaldep *d;\n unsigned int num_mkfiles = 0;\n for (d = read_makefiles; d != NULL; d = d->next)\n ++num_mkfiles;\n\n makefile_mtimes = alloca (num_mkfiles * sizeof (FILE_TIMESTAMP));\n }\n\n /* Remove any makefiles we don't want to try to update. Record the\n current modtimes of the others so we can compare them later. */\n {\n struct goaldep *d = read_makefiles;\n struct goaldep *last = NULL;\n unsigned int mm_idx = 0;\n\n while (d != 0)\n {\n struct file *f;\n\n for (f = d->file->double_colon; f != NULL; f = f->prev)\n if (f->deps == 0 && f->cmds != 0)\n break;\n\n if (f)\n {\n /* This makefile is a :: target with commands, but no\n dependencies. So, it will always be remade. This might\n well cause an infinite loop, so don't try to remake it.\n (This will only happen if your makefiles are written\n exceptionally stupidly; but if you work for Athena, that's\n how you write your makefiles.) */\n\n DB (DB_VERBOSE,\n (_(\"Makefile '%s' might loop; not remaking it.\\n\"),\n f->name));\n\n if (last)\n last->next = d->next;\n else\n read_makefiles = d->next;\n\n /* Free the storage. */\n free_goaldep (d);\n\n d = last ? last->next : read_makefiles;\n }\n else\n {\n makefile_mtimes[mm_idx++] = file_mtime_no_search (d->file);\n last = d;\n d = d->next;\n }\n }\n }\n\n /* Set up 'MAKEFLAGS' specially while remaking makefiles. */\n define_makeflags (1, 1);\n\n {\n int orig_db_level = db_level;\n\n if (! ISDB (DB_MAKEFILES))\n db_level = DB_NONE;\n\n rebuilding_makefiles = 1;\n\tstatus = update_goal_chain (read_makefiles);\n rebuilding_makefiles = 0;\n\n db_level = orig_db_level;\n }\n\n switch (status)\n {\n case us_question:\n /* The only way this can happen is if the user specified -q and asked\n for one of the makefiles to be remade as a target on the command\n line. 
Since we're not actually updating anything with -q we can\n treat this as \"did nothing\". */\n\n case us_none:\n /* Did nothing. */\n break;\n\n case us_failed:\n /* Failed to update. Figure out if we care. */\n {\n /* Nonzero if any makefile was successfully remade. */\n int any_remade = 0;\n /* Nonzero if any makefile we care about failed\n in updating or could not be found at all. */\n int any_failed = 0;\n unsigned int i;\n struct goaldep *d;\n\n for (i = 0, d = read_makefiles; d != 0; ++i, d = d->next)\n {\n if (d->file->updated)\n {\n /* This makefile was updated. */\n if (d->file->update_status == us_success)\n {\n /* It was successfully updated. */\n any_remade |= (file_mtime_no_search (d->file)\n != makefile_mtimes[i]);\n }\n else if (! (d->flags & RM_DONTCARE))\n {\n FILE_TIMESTAMP mtime;\n /* The update failed and this makefile was not\n from the MAKEFILES variable, so we care. */\n OS (error, NILF, _(\"Failed to remake makefile '%s'.\"),\n d->file->name);\n mtime = file_mtime_no_search (d->file);\n any_remade |= (mtime != NONEXISTENT_MTIME\n && mtime != makefile_mtimes[i]);\n makefile_status = MAKE_FAILURE;\n }\n }\n else\n /* This makefile was not found at all. */\n if (! (d->flags & RM_DONTCARE))\n {\n const char *dnm = dep_name (d);\n size_t l = strlen (dnm);\n\n /* This is a makefile we care about. See how much. */\n if (d->flags & RM_INCLUDED)\n /* An included makefile. We don't need to die, but we\n do want to complain. */\n error (NILF, l,\n _(\"Included makefile '%s' was not found.\"), dnm);\n else\n {\n /* A normal makefile. We must die later. */\n error (NILF, l,\n _(\"Makefile '%s' was not found\"), dnm);\n any_failed = 1;\n }\n }\n }\n\n if (any_remade)\n goto re_exec;\n if (any_failed)\n die (MAKE_FAILURE);\n break;\n }\n\n case us_success:\n re_exec:\n /* Updated successfully. Re-exec ourselves. 
*/\n\n remove_intermediates (0);\n\n if (print_data_base_flag)\n print_data_base ();\n\n clean_jobserver (0);\n\n if (makefiles != 0)\n {\n /* These names might have changed. */\n int i, j = 0;\n for (i = 1; i < argc; ++i)\n if (strneq (argv[i], \"-f\", 2)) /* XXX */\n {\n if (argv[i][2] == '\\0')\n /* This cast is OK since we never modify argv. */\n argv[++i] = (char *) makefiles->list[j];\n else\n argv[i] = xstrdup (concat (2, \"-f\", makefiles->list[j]));\n ++j;\n }\n }\n\n /* Add -o option for the stdin temporary file, if necessary. */\n nargc = argc;\n if (stdin_nm)\n {\n void *m = xmalloc ((nargc + 2) * sizeof (char *));\n aargv = m;\n memcpy (aargv, argv, argc * sizeof (char *));\n aargv[nargc++] = xstrdup (concat (2, \"-o\", stdin_nm));\n aargv[nargc] = 0;\n nargv = m;\n }\n else\n nargv = (const char**)argv;\n\n if (directories != 0 && directories->idx > 0)\n {\n int bad = 1;\n if (directory_before_chdir != 0)\n {\n if (chdir (directory_before_chdir) < 0)\n perror_with_name (\"chdir\", \"\");\n else\n bad = 0;\n }\n if (bad)\n O (fatal, NILF,\n _(\"Couldn't change back to original directory.\"));\n }\n\n ++restarts;\n\n if (ISDB (DB_BASIC))\n {\n const char **p;\n printf (_(\"Re-executing[%u]:\"), restarts);\n for (p = nargv; *p != 0; ++p)\n printf (\" %s\", *p);\n putchar ('\\n');\n fflush (stdout);\n }\n\n {\n char **p;\n for (p = environ; *p != 0; ++p)\n {\n if (strneq (*p, MAKELEVEL_NAME \"=\", MAKELEVEL_LENGTH+1))\n {\n *p = alloca (40);\n sprintf (*p, \"%s=%u\", MAKELEVEL_NAME, makelevel);\n }\n else if (strneq (*p, \"MAKE_RESTARTS=\", CSTRLEN (\"MAKE_RESTARTS=\")))\n {\n *p = alloca (40);\n sprintf (*p, \"MAKE_RESTARTS=%s%u\",\n OUTPUT_IS_TRACED () ? \"-\" : \"\", restarts);\n restarts = 0;\n }\n }\n }\n\n /* If we didn't set the restarts variable yet, add it. */\n if (restarts)\n {\n char *b = alloca (40);\n sprintf (b, \"MAKE_RESTARTS=%s%u\",\n OUTPUT_IS_TRACED () ? 
\"-\" : \"\", restarts);\n putenv (b);\n }\n\n fflush (stdout);\n fflush (stderr);\n\n /* The exec'd \"child\" will be another make, of course. */\n jobserver_pre_child(1);\n\n#ifdef SET_STACK_SIZE\n /* Reset limits, if necessary. */\n if (stack_limit.rlim_cur)\n setrlimit (RLIMIT_STACK, &stack_limit);\n#endif\n exec_command ((char **)nargv, environ);\n\n /* We shouldn't get here but just in case. */\n jobserver_post_child(1);\n free (aargv);\n break;\n }\n }\n\n /* Set up 'MAKEFLAGS' again for the normal targets. */\n define_makeflags (1, 0);\n\n /* Set always_make_flag if -B was given. */\n always_make_flag = always_make_set;\n\n /* If restarts is set we haven't set up -W files yet, so do that now. */\n if (restarts && new_files != 0)\n {\n const char **p;\n for (p = new_files->list; *p != 0; ++p)\n {\n struct file *f = enter_file (*p);\n f->last_mtime = f->mtime_before_update = NEW_MTIME;\n }\n }\n\n /* If there is a temp file from reading a makefile from stdin, get rid of\n it now. */\n if (stdin_nm && unlink (stdin_nm) < 0 && errno != ENOENT)\n perror_with_name (_(\"unlink (temporary file): \"), stdin_nm);\n\n /* If there were no command-line goals, use the default. */\n if (goals == 0)\n {\n char *p;\n\n if (default_goal_var->recursive)\n p = variable_expand (default_goal_var->value);\n else\n {\n p = variable_buffer_output (variable_buffer, default_goal_var->value,\n strlen (default_goal_var->value));\n *p = '\\0';\n p = variable_buffer;\n }\n\n if (*p != '\\0')\n {\n struct file *f = lookup_file (p);\n\n /* If .DEFAULT_GOAL is a non-existent target, enter it into the\n table and let the standard logic sort it out. */\n if (f == 0)\n {\n struct nameseq *ns;\n\n ns = PARSE_SIMPLE_SEQ (&p, struct nameseq);\n if (ns)\n {\n /* .DEFAULT_GOAL should contain one target. */\n if (ns->next != 0)\n O (fatal, NILF,\n _(\".DEFAULT_GOAL contains more than one target\"));\n\n f = enter_file (strcache_add (ns->name));\n\n ns->name = 0; /* It was reused by enter_file(). 
*/
                  free_ns_chain (ns);
                }
            }

          if (f)
            {
              goals = alloc_goaldep ();
              goals->file = f;
            }
        }
    }
  else
    lastgoal->next = 0;


  if (!goals)
    {
      struct variable *v = lookup_variable (STRING_SIZE_TUPLE ("MAKEFILE_LIST"));
      if (v && v->value && v->value[0] != '\0')
        O (fatal, NILF, _("No targets"));

      O (fatal, NILF, _("No targets specified and no makefile found"));
    }
  /* remake extension: list targets-as-tasks (with comments) and exit.
     NOTE(review): show_task_comments_flag is known nonzero inside this if,
     so the ?: always picks INFO_TARGET_TASKS_WITH_COMMENTS; the
     INFO_TARGET_TASKS arm looks dead — confirm intent.  */
  if (show_task_comments_flag) {
    dbg_cmd_info_targets(show_task_comments_flag
                         ? INFO_TARGET_TASKS_WITH_COMMENTS
                         : INFO_TARGET_TASKS);
    die(0);
  }
  /* remake extension: list tasks or target names, then exit.  */
  if (show_tasks_flag) {
    dbg_cmd_info_tasks();
    die(0);
  } else if (show_targets_flag) {
    dbg_cmd_info_targets(INFO_TARGET_NAME);
    die(0);
  }

  /* Update the goals. */

  DB (DB_BASIC, (_("Updating goal targets...\n")));

  {
    /* Build every goal and fold the result into the overall exit status.  */
    switch (update_goal_chain (goals))
      {
      case us_none:
        /* Nothing happened. */
        /* FALLTHROUGH */
      case us_success:
        /* Keep the previous result. */
        break;
      case us_question:
        /* We are under -q and would run some commands. */
        makefile_status = MAKE_TROUBLE;
        break;
      case us_failed:
        /* Updating failed. POSIX.2 specifies exit status >1 for this; */
        makefile_status = MAKE_FAILURE;
        break;
      }

    /* If we detected some clock skew, generate one last warning */
    if (clock_skew_detected)
      O (error, NILF,
         _("warning: Clock skew detected. Your build may be incomplete."));

    /* Exit. */
    die (makefile_status);
  }

  /* NOTREACHED */
  exit (MAKE_SUCCESS);
}

/* Parsing of arguments, decoding of switches. */

/* Short-option string handed to getopt: each switch can contribute one
   option character plus up to two ':' markers, hence the "* 3".  */
static char options[1 + sizeof (switches) / sizeof (switches[0]) * 3];
static struct option long_options[(sizeof (switches) / sizeof (switches[0])) +
                                  (sizeof (long_option_aliases) /
                                   sizeof (long_option_aliases[0]))];

/* Fill in the string and vector for getopt.  Builds 'options' and
   'long_options' from the 'switches' table plus 'long_option_aliases'.
   Idempotent: a non-empty 'options' means it already ran.  */
static void
init_switches (void)
{
  char *p;
  unsigned int c;
  unsigned int i;

  if (options[0] != '\0')
    /* Already done. */
    return;

  p = options;

  /* Return switch and non-switch args in order, regardless of
     POSIXLY_CORRECT.  Non-switch args are returned as option 1. */
  *p++ = '-';

  for (i = 0; switches[i].c != '\0'; ++i)
    {
      long_options[i].name = (char *) (switches[i].long_name == 0 ? "" :
                                       switches[i].long_name);
      long_options[i].flag = 0;
      long_options[i].val = switches[i].c;
      if (short_option (switches[i].c))
        *p++ = (char) switches[i].c;
      switch (switches[i].type)
        {
        case flag:
        case flag_off:
        case ignore:
          long_options[i].has_arg = no_argument;
          break;

        case string:
        case strlist:
        case filename:
        case positive_int:
        case floating:
          /* In getopt syntax one ':' marks a required argument and
             "::" an optional one.  */
          if (short_option (switches[i].c))
            *p++ = ':';
          if (switches[i].noarg_value != 0)
            {
              if (short_option (switches[i].c))
                *p++ = ':';
              long_options[i].has_arg = optional_argument;
            }
          else
            long_options[i].has_arg = required_argument;
          break;
        }
    }
  *p = '\0';
  /* Append the long-name aliases, then terminate the table.  */
  for (c = 0; c < (sizeof (long_option_aliases) /
                   sizeof (long_option_aliases[0]));
       ++c)
    long_options[i++] = long_option_aliases[c];
  long_options[i].name = 0;
}


/* Non-option argument.  It might be a variable definition.
   ARG is one command-line word; ENV is nonzero when it came from an
   environment variable's flags rather than the real command line (in
   that case non-variable words are ignored rather than made goals).  */
static void
handle_non_switch_argument (const char *arg, int env)
{
  struct variable *v;

  if (arg[0] == '-' && arg[1] == '\0')
    /* Ignore plain '-' for compatibility. */
    return;

  v = try_variable_definition (0, arg, o_command, 0);
  if (v != 0)
    {
      /* It is indeed a variable definition.  If we don't already have this
         one, record a pointer to the variable for later use in
         define_makeflags. */
      struct command_variable *cv;

      for (cv = command_variables; cv != 0; cv = cv->next)
        if (cv->variable == v)
          break;

      if (! cv)
        {
          cv = xmalloc (sizeof (*cv));
          cv->variable = v;
          cv->next = command_variables;
          command_variables = cv;
        }
    }
  else if (! env)
    {
      /* Not an option or variable definition; it must be a goal
         target!  Enter it as a file and add it to the dep chain of
         goals. */
      struct file *f = enter_file (strcache_add (expand_command_line_file (arg)));
      f->cmd_target = 1;

      if (goals == 0)
        {
          goals = alloc_goaldep ();
          lastgoal = goals;
        }
      else
        {
          lastgoal->next = alloc_goaldep ();
          lastgoal = lastgoal->next;
        }

      lastgoal->file = f;

      {
        /* Add this target name to the MAKECMDGOALS variable. */
        struct variable *gv;
        const char *value;

        gv = lookup_variable (STRING_SIZE_TUPLE ("MAKECMDGOALS"));
        if (gv == 0)
          value = f->name;
        else
          {
            /* Paste the old and new values together */
            size_t oldlen, newlen;
            char *vp;

            oldlen = strlen (gv->value);
            newlen = strlen (f->name);
            vp = alloca (oldlen + 1 + newlen + 1);
            memcpy (vp, gv->value, oldlen);
            vp[oldlen] = ' ';
            memcpy (&vp[oldlen + 1], f->name, newlen + 1);
            value = vp;
          }
        define_variable_cname ("MAKECMDGOALS", value, o_default, 0);
      }
    }
}

/* Print a nice usage method.  Output goes to stderr when BAD is nonzero
   (the error path) and to stdout otherwise.  */

static void
print_usage (int bad)
{
  const char *const *cpp;
  FILE *usageto;

  if (print_version_flag)
    print_version ();

  usageto = bad ? stderr : stdout;

  fprintf (usageto, _("Usage: %s [options] [target] ...\n"), program);

  for (cpp = usage; *cpp; ++cpp)
    fputs (_(*cpp), usageto);

  if (!remote_description || *remote_description == '\0')
    fprintf (usageto, _("\nThis program built for %s\n"), make_host);
  else
    fprintf (usageto, _("\nThis program built for %s (%s)\n"),
             make_host, remote_description);

  fprintf (usageto, _("Report bugs to https://github.com/rocky/remake/issues\n"));
}

/* Decode switches from ARGC and ARGV.
   They came from the environment if ENV is nonzero. */

static void
decode_switches (int argc, const char **argv, int env)
{
  int bad = 0;
  const struct command_switch *cs;
  struct stringlist *sl;
  int c;

  /* getopt does most of the parsing for us.
     First, get its vectors set up. 
*/\n\n init_switches ();\n\n /* Let getopt produce error messages for the command line,\n but not for options from the environment. */\n opterr = !env;\n /* Reset getopt's state. */\n optind = 0;\n\n while (optind < argc)\n {\n const char *coptarg;\n\n /* Parse the next argument. */\n c = getopt_long (argc, (char*const*)argv, options, long_options, NULL);\n coptarg = optarg;\n if (c == EOF)\n /* End of arguments, or \"--\" marker seen. */\n break;\n else if (c == 1)\n /* An argument not starting with a dash. */\n handle_non_switch_argument (coptarg, env);\n else if (c == '?')\n /* Bad option. We will print a usage message and die later.\n But continue to parse the other options so the user can\n see all he did wrong. */\n bad = 1;\n else\n for (cs = switches; cs->c != '\\0'; ++cs)\n if (cs->c == c)\n {\n /* Whether or not we will actually do anything with\n this switch. We test this individually inside the\n switch below rather than just once outside it, so that\n options which are to be ignored still consume args. */\n int doit = !env || cs->env;\n\n switch (cs->type)\n {\n default:\n abort ();\n\n case ignore:\n break;\n\n case flag:\n case flag_off:\n if (doit)\n *(int *) cs->value_ptr = cs->type == flag;\n break;\n\n case string:\n case strlist:\n case filename:\n if (!doit)\n break;\n\n if (! coptarg)\n coptarg = xstrdup (cs->noarg_value);\n else if (*coptarg == '\\0')\n {\n char opt[2] = \"c\";\n const char *op = opt;\n\n if (short_option (cs->c))\n opt[0] = (char) cs->c;\n else\n op = cs->long_name;\n\n error (NILF, strlen (op),\n _(\"the '%s%s' option requires a non-empty string argument\"),\n short_option (cs->c) ? 
\"-\" : \"--\", op);\n bad = 1;\n break;\n }\n\n if (cs->type == string)\n {\n char **val = (char **)cs->value_ptr;\n free (*val);\n *val = xstrdup (coptarg);\n break;\n }\n\n sl = *(struct stringlist **) cs->value_ptr;\n if (sl == 0)\n {\n sl = xmalloc (sizeof (struct stringlist));\n sl->max = 5;\n sl->idx = 0;\n sl->list = xmalloc (5 * sizeof (char *));\n *(struct stringlist **) cs->value_ptr = sl;\n }\n else if (sl->idx == sl->max - 1)\n {\n sl->max += 5;\n /* MSVC erroneously warns without a cast here. */\n sl->list = xrealloc ((void *)sl->list,\n sl->max * sizeof (char *));\n }\n if (cs->type == filename)\n sl->list[sl->idx++] = expand_command_line_file (coptarg);\n else\n sl->list[sl->idx++] = xstrdup (coptarg);\n sl->list[sl->idx] = 0;\n break;\n\n case positive_int:\n /* See if we have an option argument; if we do require that\n it's all digits, not something like \"10foo\". */\n if (coptarg == 0 && argc > optind)\n {\n const char *cp;\n for (cp=argv[optind]; ISDIGIT (cp[0]); ++cp)\n ;\n if (cp[0] == '\\0')\n coptarg = argv[optind++];\n }\n\n if (!doit)\n break;\n\n if (coptarg)\n {\n int i = atoi (coptarg);\n const char *cp;\n\n /* Yes, I realize we're repeating this in some cases. */\n for (cp = coptarg; ISDIGIT (cp[0]); ++cp)\n ;\n\n if (i < 1 || cp[0] != '\\0')\n {\n error (NILF, 0,\n _(\"the '-%c' option requires a positive integer argument\"),\n cs->c);\n bad = 1;\n }\n else\n *(unsigned int *) cs->value_ptr = i;\n }\n else\n *(unsigned int *) cs->value_ptr\n = *(unsigned int *) cs->noarg_value;\n break;\n\n case floating:\n if (coptarg == 0 && optind < argc\n && (ISDIGIT (argv[optind][0]) || argv[optind][0] == '.'))\n coptarg = argv[optind++];\n\n if (doit)\n *(double *) cs->value_ptr\n = (coptarg != 0 ? atof (coptarg)\n : *(double *) cs->noarg_value);\n\n break;\n }\n\n /* We've found the switch. Stop looking. */\n break;\n }\n }\n\n /* There are no more options according to getting getopt, but there may\n be some arguments left. 
Since we have asked for non-option arguments\n to be returned in order, this only happens when there is a \"--\"\n argument to prevent later arguments from being options. */\n while (optind < argc)\n handle_non_switch_argument (argv[optind++], env);\n\n if (!env && (bad || print_usage_flag))\n {\n print_usage (bad);\n die (bad ? MAKE_FAILURE : MAKE_SUCCESS);\n }\n\n /* If there are any options that need to be decoded do it now. */\n decode_debug_flags ();\n decode_output_sync_flags ();\n\n /* Perform any special switch handling. */\n run_silent = silent_flag;\n\n}\n\n/* Decode switches from environment variable ENVAR (which is LEN chars long).\n We do this by chopping the value into a vector of words, prepending a\n dash to the first word if it lacks one, and passing the vector to\n decode_switches. */\n\nstatic void\ndecode_env_switches (const char *envar, size_t len)\n{\n char *varref = alloca (2 + len + 2);\n char *value, *p, *buf;\n int argc;\n const char **argv;\n\n /* Get the variable's value. */\n varref[0] = '$';\n varref[1] = '(';\n memcpy (&varref[2], envar, len);\n varref[2 + len] = ')';\n varref[2 + len + 1] = '\\0';\n value = variable_expand (varref);\n\n /* Skip whitespace, and check for an empty value. */\n NEXT_TOKEN (value);\n len = strlen (value);\n if (len == 0)\n return;\n\n /* Allocate a vector that is definitely big enough. */\n argv = alloca ((1 + len + 1) * sizeof (char *));\n\n /* getopt will look at the arguments starting at ARGV[1].\n Prepend a spacer word. */\n argv[0] = 0;\n argc = 1;\n\n /* We need a buffer to copy the value into while we split it into words\n and unquote it. Set up in case we need to prepend a dash later. */\n buf = alloca (1 + len + 1);\n buf[0] = '-';\n p = buf+1;\n argv[argc] = p;\n while (*value != '\\0')\n {\n if (*value == '\\\\' && value[1] != '\\0')\n ++value; /* Skip the backslash. */\n else if (ISBLANK (*value))\n {\n /* End of the word. 
*/\n *p++ = '\\0';\n argv[++argc] = p;\n do\n ++value;\n while (ISBLANK (*value));\n continue;\n }\n *p++ = *value++;\n }\n *p = '\\0';\n argv[++argc] = 0;\n assert (p < buf + len + 2);\n\n if (argv[1][0] != '-' && strchr (argv[1], '=') == 0)\n /* The first word doesn't start with a dash and isn't a variable\n definition, so add a dash. */\n argv[1] = buf;\n\n /* Parse those words. */\n decode_switches (argc, argv, 1);\n}\n\f\n/* Quote the string IN so that it will be interpreted as a single word with\n no magic by decode_env_switches; also double dollar signs to avoid\n variable expansion in make itself. Write the result into OUT, returning\n the address of the next character to be written.\n Allocating space for OUT twice the length of IN is always sufficient. */\n\nstatic char *\nquote_for_env (char *out, const char *in)\n{\n while (*in != '\\0')\n {\n if (*in == '$')\n *out++ = '$';\n else if (ISBLANK (*in) || *in == '\\\\')\n *out++ = '\\\\';\n *out++ = *in++;\n }\n\n return out;\n}\n\n/* Define the MAKEFLAGS and MFLAGS variables to reflect the settings of the\n command switches. Include options with args if ALL is nonzero.\n Don't include options with the 'no_makefile' flag set if MAKEFILE. */\n\nstatic struct variable *\ndefine_makeflags (int all, int makefile)\n{\n const char ref[] = \"MAKEOVERRIDES\";\n const char posixref[] = \"-*-command-variables-*-\";\n const char evalref[] = \"$(-*-eval-flags-*-)\";\n const struct command_switch *cs;\n char *flagstring;\n char *p;\n\n /* We will construct a linked list of 'struct flag's describing\n all the flags which need to go in MAKEFLAGS. Then, once we\n know how many there are and their lengths, we can put them all\n together in a string. 
*/\n\n struct flag\n {\n struct flag *next;\n const struct command_switch *cs;\n const char *arg;\n };\n struct flag *flags = 0;\n struct flag *last = 0;\n size_t flagslen = 0;\n#define ADD_FLAG(ARG, LEN) \\\n do { \\\n struct flag *new = alloca (sizeof (struct flag)); \\\n new->cs = cs; \\\n new->arg = (ARG); \\\n new->next = 0; \\\n if (! flags) \\\n flags = new; \\\n else \\\n last->next = new; \\\n last = new; \\\n if (new->arg == 0) \\\n /* Just a single flag letter: \" -x\" */ \\\n flagslen += 3; \\\n else \\\n /* \" -xfoo\", plus space to escape \"foo\". */ \\\n flagslen += 1 + 1 + 1 + (3 * (LEN)); \\\n if (!short_option (cs->c)) \\\n /* This switch has no single-letter version, so we use the long. */ \\\n flagslen += 2 + strlen (cs->long_name); \\\n } while (0)\n\n for (cs = switches; cs->c != '\\0'; ++cs)\n if (cs->toenv && (!makefile || !cs->no_makefile))\n switch (cs->type)\n {\n case ignore:\n break;\n\n case flag:\n case flag_off:\n if ((!*(int *) cs->value_ptr) == (cs->type == flag_off)\n && (cs->default_value == 0\n || *(int *) cs->value_ptr != *(int *) cs->default_value))\n\t if (cs->c != 'X') ADD_FLAG (0, 0);\n break;\n\n case positive_int:\n if (all)\n {\n if ((cs->default_value != 0\n && (*(unsigned int *) cs->value_ptr\n == *(unsigned int *) cs->default_value)))\n break;\n else if (cs->noarg_value != 0\n && (*(unsigned int *) cs->value_ptr ==\n *(unsigned int *) cs->noarg_value))\n ADD_FLAG (\"\", 0); /* Optional value omitted; see below. */\n else\n {\n char *buf = alloca (30);\n sprintf (buf, \"%u\", *(unsigned int *) cs->value_ptr);\n ADD_FLAG (buf, strlen (buf));\n }\n }\n break;\n\n case floating:\n if (all)\n {\n if (cs->default_value != 0\n && (*(double *) cs->value_ptr\n == *(double *) cs->default_value))\n break;\n else if (cs->noarg_value != 0\n && (*(double *) cs->value_ptr\n == *(double *) cs->noarg_value))\n ADD_FLAG (\"\", 0); /* Optional value omitted; see below. 
*/\n else\n {\n char *buf = alloca (100);\n sprintf (buf, \"%g\", *(double *) cs->value_ptr);\n ADD_FLAG (buf, strlen (buf));\n }\n }\n break;\n\n case string:\n if (all)\n {\n p = *((char **)cs->value_ptr);\n if (p)\n ADD_FLAG (p, strlen (p));\n }\n break;\n\n case filename:\n case strlist:\n if (all)\n {\n struct stringlist *sl = *(struct stringlist **) cs->value_ptr;\n if (sl != 0)\n {\n unsigned int i;\n for (i = 0; i < sl->idx; ++i)\n ADD_FLAG (sl->list[i], strlen (sl->list[i]));\n }\n }\n break;\n\n default:\n abort ();\n }\n\n#undef ADD_FLAG\n\n /* Four more for the possible \" -- \", plus variable references. */\n flagslen += 4 + CSTRLEN (posixref) + 4 + CSTRLEN (evalref) + 4;\n\n /* Construct the value in FLAGSTRING.\n We allocate enough space for a preceding dash and trailing null. */\n flagstring = alloca (1 + flagslen + 1);\n memset (flagstring, '\\0', 1 + flagslen + 1);\n p = flagstring;\n\n /* Start with a dash, for MFLAGS. */\n *p++ = '-';\n\n /* Add simple options as a group. */\n while (flags != 0 && !flags->arg && short_option (flags->cs->c))\n {\n if (flags->cs->c != 'X') {\n *p++ = (char) flags->cs->c;\n flags = flags->next;\n }\n }\n\n /* Now add more complex flags: ones with options and/or long names. */\n while (flags)\n {\n *p++ = ' ';\n *p++ = '-';\n\n /* Add the flag letter or name to the string. */\n if (short_option (flags->cs->c)) {\n if (flags->cs->c != 'X') *p++ = (char) flags->cs->c;\n } else\n {\n /* Long options require a double-dash. */\n *p++ = '-';\n strcpy (p, flags->cs->long_name);\n p += strlen (p);\n }\n /* An omitted optional argument has an ARG of \"\". */\n if (flags->arg && flags->arg[0] != '\\0')\n {\n if (!short_option (flags->cs->c))\n /* Long options require '='. */\n *p++ = '=';\n p = quote_for_env (p, flags->arg);\n }\n flags = flags->next;\n }\n\n /* If no flags at all, get rid of the initial dash. 
*/\n if (p == &flagstring[1])\n {\n flagstring[0] = '\\0';\n p = flagstring;\n }\n\n /* Define MFLAGS before appending variable definitions. Omit an initial\n empty dash. Since MFLAGS is not parsed for flags, there is no reason to\n override any makefile redefinition. */\n define_variable_cname (\"MFLAGS\",\n flagstring + (flagstring[0] == '-' && flagstring[1] == ' ' ? 2 : 0),\n o_env, 1);\n\n /* Write a reference to -*-eval-flags-*-, which contains all the --eval\n flag options. */\n if (eval_strings)\n {\n *p++ = ' ';\n memcpy (p, evalref, CSTRLEN (evalref));\n p += CSTRLEN (evalref);\n }\n\n if (all)\n {\n /* If there are any overrides to add, write a reference to\n $(MAKEOVERRIDES), which contains command-line variable definitions.\n Separate the variables from the switches with a \"--\" arg. */\n\n const char *r = posix_pedantic ? posixref : ref;\n size_t l = strlen (r);\n struct variable *v = lookup_variable (r, l);\n\n if (v && v->value && v->value[0] != '\\0')\n {\n strcpy (p, \" -- \");\n p += 4;\n\n *(p++) = '$';\n *(p++) = '(';\n memcpy (p, r, l);\n p += l;\n *(p++) = ')';\n }\n }\n\n /* If there is a leading dash, omit it. */\n if (flagstring[0] == '-')\n ++flagstring;\n\n /* This used to use o_env, but that lost when a makefile defined MAKEFLAGS.\n Makefiles set MAKEFLAGS to add switches, but we still want to redefine\n its value with the full set of switches. Then we used o_file, but that\n lost when users added -e, causing a previous MAKEFLAGS env. var. to take\n precedence over the new one. Of course, an override or command\n definition will still take precedence. */\n return define_variable_cname (\"MAKEFLAGS\", flagstring,\n env_overrides ? o_env_override : o_file, 1);\n}\n\f\n/* Print version information. */\n\nstatic void\nprint_version (void)\n{\n static int printed_version = 0;\n\n const char *precede = print_data_base_flag ? \"# \" : \"\";\n\n if (printed_version)\n /* Do it only once. 
*/\n return;\n\n printf (\"%sGNU Make %s\\n\", precede, version_string);\n\n if (!remote_description || *remote_description == '\\0')\n printf (_(\"%sBuilt for %s\\n\"), precede, make_host);\n else\n printf (_(\"%sBuilt for %s (%s)\\n\"),\n precede, make_host, remote_description);\n\n /* Print this untranslated. The coding standards recommend translating the\n (C) to the copyright symbol, but this string is going to change every\n year, and none of the rest of it should be translated (including the\n word \"Copyright\"), so it hardly seems worth it. */\n\n printf (\"%sCopyright (C) 1988-2020 Free Software Foundation, Inc.\\n\"\n\t \"Copyright (C) 2015, 2017 Rocky Bernstein.\\n\",\n precede);\n\n printf (_(\"%sLicense GPLv3+: GNU GPL version 3 or later \\n\\\n%sThis is free software: you are free to change and redistribute it.\\n\\\n%sThere is NO WARRANTY, to the extent permitted by law.\\n\"),\n precede, precede, precede);\n\n printed_version = 1;\n\n /* Flush stdout so the user doesn't have to wait to see the\n version information while make thinks about things. */\n fflush (stdout);\n}\n\n/* Print a bunch of information about this and that. */\n\nstatic void\nprint_data_base (void)\n{\n time_t when = time ((time_t *) 0);\n\n print_version ();\n\n printf (_(\"\\n# Make data base, printed on %s\"), ctime (&when));\n\n print_variable_data_base ();\n print_dir_data_base ();\n print_rule_data_base (true);\n print_file_data_base ();\n print_vpath_data_base ();\n strcache_print_stats (\"#\");\n\n when = time ((time_t *) 0);\n printf (_(\"\\n# Finished Make data base on %s\\n\"), ctime (&when));\n}\n\nstatic void\nclean_jobserver (int status)\n{\n /* Sanity: have we written all our jobserver tokens back? If our\n exit status is 2 that means some kind of syntax error; we might not\n have written all our tokens so do that now. If tokens are left\n after any other error code, that's bad. 
*/\n\n if (jobserver_enabled() && jobserver_tokens)\n {\n if (status != 2)\n ON (error, NILF,\n \"INTERNAL: Exiting with %u jobserver tokens (should be 0)!\",\n jobserver_tokens);\n else\n /* Don't write back the \"free\" token */\n while (--jobserver_tokens)\n jobserver_release (0);\n }\n\n\n /* Sanity: If we're the master, were all the tokens written back? */\n\n if (master_job_slots)\n {\n /* We didn't write one for ourself, so start at 1. */\n unsigned int tokens = 1 + jobserver_acquire_all ();\n\n if (tokens != master_job_slots)\n ONN (error, NILF,\n \"INTERNAL: Exiting with %u jobserver tokens available; should be %u!\",\n tokens, master_job_slots);\n\n reset_jobserver ();\n }\n}\n\f\n/* Exit with STATUS, cleaning up as necessary. */\n\nvoid\ndie (int status)\n{\n static char dying = 0;\n\n if (!dying)\n {\n int err;\n\n dying = 1;\n\n if (print_version_flag)\n print_version ();\n\n /* Wait for children to die. */\n err = (status != 0);\n while (job_slots_used > 0)\n reap_children (1, err, NULL);\n\n /* Let the remote job module clean up its state. */\n remote_cleanup ();\n\n /* Remove the intermediate files. */\n remove_intermediates (0);\n\n if (print_data_base_flag)\n print_data_base ();\n\n if (verify_flag)\n verify_file_data_base ();\n\n clean_jobserver (status);\n\n if (output_context)\n {\n /* die() might be called in a recipe output context due to an\n $(error ...) function. */\n output_close (output_context);\n\n if (output_context != &make_sync)\n output_close (&make_sync);\n\n OUTPUT_UNSET ();\n }\n\n output_close (NULL);\n\n /* Try to move back to the original directory. This is essential on\n MS-DOS (where there is really only one process), and on Unix it\n puts core files in the original directory instead of the -C\n directory. Must wait until after remove_intermediates(), or unlinks\n of relative pathnames fail. */\n if (directory_before_chdir != 0)\n {\n /* If it fails we don't care: shut up GCC. 
*/\n int _x UNUSED;\n _x = chdir (directory_before_chdir);\n }\n }\n\n if (profile_flag) {\n const char *status_str;\n switch (status) {\n case MAKE_SUCCESS:\n\tstatus_str = \"Normal program termination\";\n\tbreak;\n case MAKE_TROUBLE:\n\tstatus_str = \"Platform failure termination\";\n\tbreak;\n case MAKE_FAILURE:\n\tstatus_str = \"Failure program termination\";\n\tbreak;\n case DEBUGGER_QUIT_RC:\n\tstatus_str = \"Debugger termination\";\n\tbreak;\n default:\n\tstatus_str = \"\";\n }\n\n profile_close(status_str, goals, (jobserver_auth != NULL));\n }\n exit (status);\n}\n"}}
-{"repo": "RobotiumTech/robotium", "pr_number": 851, "title": "Added a loop that retries getLocationOnScreen until it gets a value", "state": "closed", "merged_at": "2016-09-27T18:23:56Z", "additions": 7, "deletions": 1, "files_changed": ["robotium-solo/src/main/java/com/robotium/solo/Clicker.java"], "files_before": {"robotium-solo/src/main/java/com/robotium/solo/Clicker.java": "package com.robotium.solo;\n\nimport java.lang.reflect.Constructor;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\n\nimport junit.framework.Assert;\nimport android.app.Activity;\nimport android.app.Instrumentation;\nimport android.content.Context;\nimport android.os.SystemClock;\nimport android.util.Log;\nimport android.view.KeyEvent;\nimport android.view.MenuItem;\nimport android.view.MotionEvent;\nimport android.view.View;\nimport android.view.ViewConfiguration;\nimport android.view.ViewGroup;\nimport android.view.Window;\nimport android.widget.AbsListView;\nimport android.widget.TextView;\n\n/**\n * Contains various click methods. 
Examples are: clickOn(),\n * clickOnText(), clickOnScreen().\n *\n * @author Renas Reda, renas.reda@robotium.com\n *\n */\n\nclass Clicker {\n\n\tprivate final String LOG_TAG = \"Robotium\";\n\tprivate final ActivityUtils activityUtils;\n\tprivate final ViewFetcher viewFetcher;\n\tprivate final Instrumentation inst;\n\tprivate final Sender sender;\n\tprivate final Sleeper sleeper;\n\tprivate final Waiter waiter;\n\tprivate final WebUtils webUtils;\n\tprivate final DialogUtils dialogUtils;\n\tprivate final int MINI_WAIT = 300;\n\tprivate final int WAIT_TIME = 1500;\n\n\n\t/**\n\t * Constructs this object.\n\t *\n\t * @param activityUtils the {@code ActivityUtils} instance\n\t * @param viewFetcher the {@code ViewFetcher} instance\n\t * @param sender the {@code Sender} instance\n\t * @param inst the {@code android.app.Instrumentation} instance\n\t * @param sleeper the {@code Sleeper} instance\n\t * @param waiter the {@code Waiter} instance\n\t * @param webUtils the {@code WebUtils} instance\n\t * @param dialogUtils the {@code DialogUtils} instance\n\t */\n\n\tpublic Clicker(ActivityUtils activityUtils, ViewFetcher viewFetcher, Sender sender, Instrumentation inst, Sleeper sleeper, Waiter waiter, WebUtils webUtils, DialogUtils dialogUtils) {\n\n\t\tthis.activityUtils = activityUtils;\n\t\tthis.viewFetcher = viewFetcher;\n\t\tthis.sender = sender;\n\t\tthis.inst = inst;\n\t\tthis.sleeper = sleeper;\n\t\tthis.waiter = waiter;\n\t\tthis.webUtils = webUtils;\n\t\tthis.dialogUtils = dialogUtils;\n\t}\n\n\t/**\n\t * Clicks on a given coordinate on the screen.\n\t *\n\t * @param x the x coordinate\n\t * @param y the y coordinate\n\t */\n\n\tpublic void clickOnScreen(float x, float y, View view) {\n\t\tboolean successfull = false;\n\t\tint retry = 0;\n\t\tSecurityException ex = null;\n\n\t\twhile(!successfull && retry < 20) {\n\t\t\tlong downTime = SystemClock.uptimeMillis();\n\t\t\tlong eventTime = SystemClock.uptimeMillis();\n\t\t\tMotionEvent event = 
MotionEvent.obtain(downTime, eventTime,\n\t\t\t\t\tMotionEvent.ACTION_DOWN, x, y, 0);\n\t\t\tMotionEvent event2 = MotionEvent.obtain(downTime, eventTime,\n\t\t\t\t\tMotionEvent.ACTION_UP, x, y, 0);\n\t\t\ttry{\n\t\t\t\tinst.sendPointerSync(event);\n\t\t\t\tinst.sendPointerSync(event2);\n\t\t\t\tsuccessfull = true;\n\t\t\t}catch(SecurityException e){\n\t\t\t\tex = e;\n\t\t\t\tdialogUtils.hideSoftKeyboard(null, false, true);\n\t\t\t\tsleeper.sleep(MINI_WAIT);\n\t\t\t\tretry++;\n\t\t\t\tView identicalView = viewFetcher.getIdenticalView(view);\n\t\t\t\tif(identicalView != null){\n\t\t\t\t\tfloat[] xyToClick = getClickCoordinates(identicalView);\n\t\t\t\t\tx = xyToClick[0]; \n\t\t\t\t\ty = xyToClick[1];\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif(!successfull) {\n\t\t\tAssert.fail(\"Click at (\"+x+\", \"+y+\") can not be completed! (\"+(ex != null ? ex.getClass().getName()+\": \"+ex.getMessage() : \"null\")+\")\");\n\t\t}\n\t}\n\n\t/**\n\t * Long clicks a given coordinate on the screen.\n\t *\n\t * @param x the x coordinate\n\t * @param y the y coordinate\n\t * @param time the amount of time to long click\n\t */\n\n\tpublic void clickLongOnScreen(float x, float y, int time, View view) {\n\t\tboolean successfull = false;\n\t\tint retry = 0;\n\t\tSecurityException ex = null;\n\t\tlong downTime = SystemClock.uptimeMillis();\n\t\tlong eventTime = SystemClock.uptimeMillis();\n\t\tMotionEvent event = MotionEvent.obtain(downTime, eventTime, MotionEvent.ACTION_DOWN, x, y, 0);\n\n\t\twhile(!successfull && retry < 20) {\n\t\t\ttry{\n\t\t\t\tinst.sendPointerSync(event);\n\t\t\t\tsuccessfull = true;\n\t\t\t\tsleeper.sleep(MINI_WAIT);\n\t\t\t}catch(SecurityException e){\n\t\t\t\tex = e;\n\t\t\t\tdialogUtils.hideSoftKeyboard(null, false, true);\n\t\t\t\tsleeper.sleep(MINI_WAIT);\n\t\t\t\tretry++;\n\t\t\t\tView identicalView = viewFetcher.getIdenticalView(view);\n\t\t\t\tif(identicalView != null){\n\t\t\t\t\tfloat[] xyToClick = getClickCoordinates(identicalView);\n\t\t\t\t\tx = 
xyToClick[0];\n\t\t\t\t\ty = xyToClick[1];\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif(!successfull) {\n\t\t\tAssert.fail(\"Long click at (\"+x+\", \"+y+\") can not be completed! (\"+(ex != null ? ex.getClass().getName()+\": \"+ex.getMessage() : \"null\")+\")\");\n\t\t}\n\n\t\teventTime = SystemClock.uptimeMillis();\n\t\tevent = MotionEvent.obtain(downTime, eventTime, MotionEvent.ACTION_MOVE, x + 1.0f, y + 1.0f, 0);\n\t\tinst.sendPointerSync(event);\n\t\tif(time > 0)\n\t\t\tsleeper.sleep(time);\n\t\telse\n\t\t\tsleeper.sleep((int)(ViewConfiguration.getLongPressTimeout() * 2.5f));\n\n\t\teventTime = SystemClock.uptimeMillis();\n\t\tevent = MotionEvent.obtain(downTime, eventTime, MotionEvent.ACTION_UP, x, y, 0);\n\t\tinst.sendPointerSync(event);\n\t\tsleeper.sleep();\n\t}\n\n\n\t/**\n\t * Clicks on a given {@link View}.\n\t *\n\t * @param view the view that should be clicked\n\t */\n\n\tpublic void clickOnScreen(View view) {\n\t\tclickOnScreen(view, false, 0);\n\t}\n\n\t/**\n\t * Private method used to click on a given view.\n\t *\n\t * @param view the view that should be clicked\n\t * @param longClick true if the click should be a long click\n\t * @param time the amount of time to long click\n\t */\n\n\tpublic void clickOnScreen(View view, boolean longClick, int time) {\n\t\tif(view == null)\n\t\t\tAssert.fail(\"View is null and can therefore not be clicked!\");\n\n\t\tfloat[] xyToClick = getClickCoordinates(view);\n\t\tfloat x = xyToClick[0];\n\t\tfloat y = xyToClick[1];\n\n\t\tif(x == 0 || y == 0){\n\t\t\tsleeper.sleepMini();\n\t\t\ttry {\n\t\t\t\tview = viewFetcher.getIdenticalView(view);\n\t\t\t} catch (Exception ignored){}\n\n\t\t\tif(view != null){\n\t\t\t\txyToClick = getClickCoordinates(view);\n\t\t\t\tx = xyToClick[0];\n\t\t\t\ty = xyToClick[1];\n\t\t\t}\n\t\t}\n\n\t\tif (longClick)\n\t\t\tclickLongOnScreen(x, y, time, view);\n\t\telse\n\t\t\tclickOnScreen(x, y, view);\n\t}\t\n\n\t/**\n\t * Returns click coordinates for the specified view.\n\t * \n\t * @param view 
the view to get click coordinates from\n\t * @return click coordinates for a specified view\n\t */\n\n\tprivate float[] getClickCoordinates(View view){\n\t\tsleeper.sleep(200);\n\t\tint[] xyLocation = new int[2];\n\t\tfloat[] xyToClick = new float[2];\n\n\t\tview.getLocationOnScreen(xyLocation);\n\n\t\tfinal int viewWidth = view.getWidth();\n\t\tfinal int viewHeight = view.getHeight();\n\t\tfinal float x = xyLocation[0] + (viewWidth / 2.0f);\n\t\tfloat y = xyLocation[1] + (viewHeight / 2.0f);\n\n\t\txyToClick[0] = x;\n\t\txyToClick[1] = y;\n\n\t\treturn xyToClick;\n\t}\n\t\n\t\n\n\n\t/**\n\t * Long clicks on a specific {@link TextView} and then selects\n\t * an item from the context menu that appears. Will automatically scroll when needed.\n\t *\n\t * @param text the text that should be clicked on. The parameter will be interpreted as a regular expression.\n\t * @param index the index of the menu item that should be pressed\n\t */\n\n\tpublic void clickLongOnTextAndPress(String text, int index)\n\t{\n\t\tclickOnText(text, true, 0, true, 0);\n\t\tdialogUtils.waitForDialogToOpen(Timeout.getSmallTimeout(), true);\n\t\ttry{\n\t\t\tinst.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_DOWN);\n\t\t}catch(SecurityException e){\n\t\t\tAssert.fail(\"Can not press the context menu!\");\n\t\t}\n\t\tfor(int i = 0; i < index; i++)\n\t\t{\n\t\t\tsleeper.sleepMini();\n\t\t\tinst.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_DOWN);\n\t\t}\n\t\tinst.sendKeyDownUpSync(KeyEvent.KEYCODE_ENTER);\n\t}\n\n\t/**\n\t * Opens the menu and waits for it to open.\n\t */\n\n\tprivate void openMenu(){\n\t\tsleeper.sleepMini();\n\n\t\tif(!dialogUtils.waitForDialogToOpen(MINI_WAIT, false)) {\n\t\t\ttry{\n\t\t\t\tsender.sendKeyCode(KeyEvent.KEYCODE_MENU);\n\t\t\t\tdialogUtils.waitForDialogToOpen(WAIT_TIME, true);\n\t\t\t}catch(SecurityException e){\n\t\t\t\tAssert.fail(\"Can not open the menu!\");\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Clicks on a menu item with a given text.\n\t *\n\t * @param text the menu text 
that should be clicked on. The parameter will be interpreted as a regular expression.\n\t */\n\n\tpublic void clickOnMenuItem(String text)\n\t{\n\t\topenMenu();\n\t\tclickOnText(text, false, 1, true, 0);\n\t}\n\n\t/**\n\t * Clicks on a menu item with a given text.\n\t *\n\t * @param text the menu text that should be clicked on. The parameter will be interpreted as a regular expression.\n\t * @param subMenu true if the menu item could be located in a sub menu\n\t */\n\n\tpublic void clickOnMenuItem(String text, boolean subMenu)\n\t{\n\t\tsleeper.sleepMini();\n\n\t\tTextView textMore = null;\n\t\tint [] xy = new int[2];\n\t\tint x = 0;\n\t\tint y = 0;\n\n\t\tif(!dialogUtils.waitForDialogToOpen(MINI_WAIT, false)) {\n\t\t\ttry{\n\t\t\t\tsender.sendKeyCode(KeyEvent.KEYCODE_MENU);\n\t\t\t\tdialogUtils.waitForDialogToOpen(WAIT_TIME, true);\n\t\t\t}catch(SecurityException e){\n\t\t\t\tAssert.fail(\"Can not open the menu!\");\n\t\t\t}\n\t\t}\n\t\tboolean textShown = waiter.waitForText(text, 1, WAIT_TIME, true) != null;\n\n\t\tif(subMenu && (viewFetcher.getCurrentViews(TextView.class, true).size() > 5) && !textShown){\n\t\t\tfor(TextView textView : viewFetcher.getCurrentViews(TextView.class, true)){\n\t\t\t\tx = xy[0];\n\t\t\t\ty = xy[1];\n\t\t\t\ttextView.getLocationOnScreen(xy);\n\n\t\t\t\tif(xy[0] > x || xy[1] > y)\n\t\t\t\t\ttextMore = textView;\n\t\t\t}\n\t\t}\n\t\tif(textMore != null)\n\t\t\tclickOnScreen(textMore);\n\n\t\tclickOnText(text, false, 1, true, 0);\n\t}\n\n\t/**\n\t * Clicks on an ActionBar item with a given resource id\n\t *\n\t * @param resourceId the R.id of the ActionBar item\n\t */\n\n\tpublic void clickOnActionBarItem(int resourceId){\n\t\tsleeper.sleep();\n\t\tActivity activity = activityUtils.getCurrentActivity();\n\t\tif(activity != null){\n\t\t\tinst.invokeMenuActionSync(activity, resourceId, 0);\n\t\t}\n\t}\n\n\t/**\n\t * Clicks on an ActionBar Home/Up button.\n\t */\n\n\tpublic void clickOnActionBarHomeButton() {\n\t\tActivity activity = 
activityUtils.getCurrentActivity();\n\t\tMenuItem homeMenuItem = null;\n\n\t\ttry {\n\t\t\tClass> cls = Class.forName(\"com.android.internal.view.menu.ActionMenuItem\");\n\t\t\tClass> partypes[] = new Class[6];\n\t\t\tpartypes[0] = Context.class;\n\t\t\tpartypes[1] = Integer.TYPE;\n\t\t\tpartypes[2] = Integer.TYPE;\n\t\t\tpartypes[3] = Integer.TYPE;\n\t\t\tpartypes[4] = Integer.TYPE;\n\t\t\tpartypes[5] = CharSequence.class;\n\t\t\tConstructor> ct = cls.getConstructor(partypes);\n\t\t\tObject argList[] = new Object[6];\n\t\t\targList[0] = activity;\n\t\t\targList[1] = 0;\n\t\t\targList[2] = android.R.id.home;\n\t\t\targList[3] = 0;\n\t\t\targList[4] = 0;\n\t\t\targList[5] = \"\";\n\t\t\thomeMenuItem = (MenuItem) ct.newInstance(argList);\n\t\t} catch (Exception ex) {\n\t\t\tLog.d(LOG_TAG, \"Can not find methods to invoke Home button!\");\n\t\t}\n\n\t\tif (homeMenuItem != null) {\n\t\t\ttry{\n\t\t\t\tactivity.getWindow().getCallback().onMenuItemSelected(Window.FEATURE_OPTIONS_PANEL, homeMenuItem);\n\t\t\t}catch(Exception ignored) {}\n\t\t}\n\t}\n\n\t/**\n\t * Clicks on a web element using the given By method.\n\t *\n\t * @param by the By object e.g. 
By.id(\"id\");\n\t * @param match if multiple objects match, this determines which one will be clicked\n\t * @param scroll true if scrolling should be performed\n\t * @param useJavaScriptToClick true if click should be perfomed through JavaScript\n\t */\n\n\tpublic void clickOnWebElement(By by, int match, boolean scroll, boolean useJavaScriptToClick){\n\t\tWebElement webElement = null;\n\t\t\n\t\tif(useJavaScriptToClick){\n\t\t\twebElement = waiter.waitForWebElement(by, match, Timeout.getSmallTimeout(), false);\n\t\t\tif(webElement == null){\n\t\t\t\tAssert.fail(\"WebElement with \" + webUtils.splitNameByUpperCase(by.getClass().getSimpleName()) + \": '\" + by.getValue() + \"' is not found!\");\n\t\t\t}\n\t\t\twebUtils.executeJavaScript(by, true);\n\t\t\treturn;\n\t\t}\n\t\t\n\t\tWebElement webElementToClick = waiter.waitForWebElement(by, match, Timeout.getSmallTimeout(), scroll);\n\t\t\n\t\tif(webElementToClick == null){\n\t\t\tif(match > 1) {\n\t\t\t\tAssert.fail(match + \" WebElements with \" + webUtils.splitNameByUpperCase(by.getClass().getSimpleName()) + \": '\" + by.getValue() + \"' are not found!\");\n\t\t\t}\n\t\t\telse {\n\t\t\t\tAssert.fail(\"WebElement with \" + webUtils.splitNameByUpperCase(by.getClass().getSimpleName()) + \": '\" + by.getValue() + \"' is not found!\");\n\t\t\t}\n\t\t}\n\t\t\n\t\tclickOnScreen(webElementToClick.getLocationX(), webElementToClick.getLocationY(), null);\n\t}\n\n\n\t/**\n\t * Clicks on a specific {@link TextView} displaying a given text.\n\t *\n\t * @param regex the text that should be clicked on. 
The parameter will be interpreted as a regular expression.\n\t * @param longClick {@code true} if the click should be a long click\n\t * @param match the regex match that should be clicked on\n\t * @param scroll true if scrolling should be performed\n\t * @param time the amount of time to long click\n\t */\n\n\tpublic void clickOnText(String regex, boolean longClick, int match, boolean scroll, int time) {\n\t\tTextView textToClick = waiter.waitForText(regex, match, Timeout.getSmallTimeout(), scroll, true, false);\n\n\t\tif (textToClick != null) {\n\t\t\tclickOnScreen(textToClick, longClick, time);\n\t\t}\n\n\t\telse {\n\n\t\t\tif(match > 1){\n\t\t\t\tAssert.fail(match + \" matches of text string: '\" + regex + \"' are not found!\");\n\t\t\t}\n\n\t\t\telse{\n\t\t\t\tArrayList allTextViews = RobotiumUtils.removeInvisibleViews(viewFetcher.getCurrentViews(TextView.class, true));\n\t\t\t\tallTextViews.addAll((Collection extends TextView>) webUtils.getTextViewsFromWebView());\n\n\t\t\t\tfor (TextView textView : allTextViews) {\n\t\t\t\t\tLog.d(LOG_TAG, \"'\" + regex + \"' not found. Have found: '\" + textView.getText() + \"'\");\n\t\t\t\t}\n\t\t\t\tallTextViews = null;\n\t\t\t\tAssert.fail(\"Text string: '\" + regex + \"' is not found!\");\n\t\t\t}\n\t\t}\n\t}\n\n\n\t/**\n\t * Clicks on a {@code View} of a specific class, with a given text.\n\t *\n\t * @param viewClass what kind of {@code View} to click, e.g. {@code Button.class} or {@code TextView.class}\n\t * @param nameRegex the name of the view presented to the user. 
The parameter will be interpreted as a regular expression.\n\t */\n\n\tpublic void clickOn(Class viewClass, String nameRegex) {\n\t\tT viewToClick = (T) waiter.waitForText(viewClass, nameRegex, 0, Timeout.getSmallTimeout(), true, true, false);\n\n\t\tif (viewToClick != null) {\n\t\t\tclickOnScreen(viewToClick);\n\t\t} else {\n\t\t\tArrayList allTextViews = RobotiumUtils.removeInvisibleViews(viewFetcher.getCurrentViews(viewClass, true));\n\n\t\t\tfor (T view : allTextViews) {\n\t\t\t\tLog.d(LOG_TAG, \"'\" + nameRegex + \"' not found. Have found: '\" + view.getText() + \"'\");\n\t\t\t}\n\t\t\tAssert.fail(viewClass.getSimpleName() + \" with text: '\" + nameRegex + \"' is not found!\");\n\t\t}\n\t}\n\n\t/**\n\t * Clicks on a {@code View} of a specific class, with a certain index.\n\t *\n\t * @param viewClass what kind of {@code View} to click, e.g. {@code Button.class} or {@code ImageView.class}\n\t * @param index the index of the {@code View} to be clicked, within {@code View}s of the specified class\n\t */\n\n\tpublic void clickOn(Class viewClass, int index) {\n\t\tclickOnScreen(waiter.waitForAndGetView(index, viewClass));\n\t}\n\n\n\t/**\n\t * Clicks on a certain list line and returns the {@link TextView}s that\n\t * the list line is showing. 
Will use the first list it finds.\n\t *\n\t * @param line the line that should be clicked\n\t * @return a {@code List} of the {@code TextView}s located in the list line\n\t */\n\n\tpublic ArrayList clickInList(int line) {\n\t\treturn clickInList(line, 0, 0, false, 0);\n\t}\n\t\n\t/**\n\t * Clicks on a View with a specified resource id located in a specified list line\n\t *\n\t * @param line the line where the View is located\n\t * @param id the resource id of the View\n\t */\n\n\tpublic void clickInList(int line, int id) {\n\t\tclickInList(line, 0, id, false, 0);\n\t}\n\n\t/**\n\t * Clicks on a certain list line on a specified List and\n\t * returns the {@link TextView}s that the list line is showing.\n\t *\n\t * @param line the line that should be clicked\n\t * @param index the index of the list. E.g. Index 1 if two lists are available\n\t * @param id the resource id of the View to click\n\t * @return an {@code ArrayList} of the {@code TextView}s located in the list line\n\t */\n\n\tpublic ArrayList clickInList(int line, int index, int id, boolean longClick, int time) {\n\t\tfinal long endTime = SystemClock.uptimeMillis() + Timeout.getSmallTimeout();\n\n\t\tint lineIndex = line - 1;\n\t\tif(lineIndex < 0)\n\t\t\tlineIndex = 0;\n\n\t\tArrayList views = new ArrayList();\n\t\tfinal AbsListView absListView = waiter.waitForAndGetView(index, AbsListView.class);\n\n\t\tif(absListView == null)\n\t\t\tAssert.fail(\"AbsListView is null!\");\n\n\t\tfailIfIndexHigherThenChildCount(absListView, lineIndex, endTime);\n\n\t\tView viewOnLine = getViewOnAbsListLine(absListView, index, lineIndex);\n\n\t\tif(viewOnLine != null){\n\t\t\tviews = viewFetcher.getViews(viewOnLine, true);\n\t\t\tviews = RobotiumUtils.removeInvisibleViews(views);\n\n\t\t\tif(id == 0){\n\t\t\t\tclickOnScreen(viewOnLine, longClick, time);\n\t\t\t}\n\t\t\telse{\n\t\t\t\tclickOnScreen(getView(id, views));\n\t\t\t}\n\t\t}\n\t\treturn RobotiumUtils.filterViews(TextView.class, views);\n\t}\n\t\n\t/**\n\t * Clicks 
on a certain list line and returns the {@link TextView}s that\n\t * the list line is showing. Will use the first list it finds.\n\t *\n\t * @param line the line that should be clicked\n\t * @return a {@code List} of the {@code TextView}s located in the list line\n\t */\n\n\tpublic ArrayList clickInRecyclerView(int line) {\n\t\treturn clickInRecyclerView(line, 0, 0, false, 0);\n\t}\n\t\n\t/**\n\t * Clicks on a View with a specified resource id located in a specified RecyclerView itemIndex\n\t *\n\t * @param itemIndex the index where the View is located\n\t * @param id the resource id of the View\n\t */\n\n\tpublic void clickInRecyclerView(int itemIndex, int id) {\n\t\tclickInRecyclerView(itemIndex, 0, id, false, 0);\n\t}\n\n\t\n\t/**\n\t * Clicks on a certain list line on a specified List and\n\t * returns the {@link TextView}s that the list line is showing.\n\t *\n\t * @param itemIndex the item index that should be clicked\n\t * @param recyclerViewIndex the index of the RecyclerView. E.g. 
Index 1 if two RecyclerViews are available\n\t * @param id the resource id of the View to click\n\t * @return an {@code ArrayList} of the {@code TextView}s located in the list line\n\t */\n\n\tpublic ArrayList clickInRecyclerView(int itemIndex, int recyclerViewIndex, int id, boolean longClick, int time) {\n\t\tView viewOnLine = null;\n\t\tfinal long endTime = SystemClock.uptimeMillis() + Timeout.getSmallTimeout();\n\n\t\tif(itemIndex < 0)\n\t\t\titemIndex = 0;\n\n\t\tArrayList views = new ArrayList();\n\t\tViewGroup recyclerView = viewFetcher.getRecyclerView(recyclerViewIndex, Timeout.getSmallTimeout());\n\t\t\n\t\tif(recyclerView == null){\n\t\t\tAssert.fail(\"RecyclerView is not found!\");\n\t\t}\n\t\telse{\n\t\t\tfailIfIndexHigherThenChildCount(recyclerView, itemIndex, endTime);\n\t\t\tviewOnLine = getViewOnRecyclerItemIndex((ViewGroup) recyclerView, recyclerViewIndex, itemIndex);\n\t\t}\n\t\t\n\t\tif(viewOnLine != null){\n\t\t\tviews = viewFetcher.getViews(viewOnLine, true);\n\t\t\tviews = RobotiumUtils.removeInvisibleViews(views);\n\t\t\t\n\t\t\tif(id == 0){\n\t\t\t\tclickOnScreen(viewOnLine, longClick, time);\n\t\t\t}\n\t\t\telse{\n\t\t\t\tclickOnScreen(getView(id, views));\n\t\t\t}\n\t\t}\n\t\treturn RobotiumUtils.filterViews(TextView.class, views);\n\t}\n\t\n\tprivate View getView(int id, List views){\n\t\tfor(View view : views){\n\t\t\tif(id == view.getId()){\n\t\t\t\treturn view;\n\t\t\t}\n\t\t}\n\t\treturn null;\n\t}\n\t\n\tprivate void failIfIndexHigherThenChildCount(ViewGroup viewGroup, int index, long endTime){\n\t\twhile(index > viewGroup.getChildCount()){\n\t\t\tfinal boolean timedOut = SystemClock.uptimeMillis() > endTime;\n\t\t\tif (timedOut){\n\t\t\t\tint numberOfIndexes = viewGroup.getChildCount();\n\t\t\t\tAssert.fail(\"Can not click on index \" + index + \" as there are only \" + numberOfIndexes + \" indexes available\");\n\t\t\t}\n\t\t\tsleeper.sleep();\n\t\t}\n\t}\n\t\n\n\t/**\n\t * Returns the view in the specified list line\n\t * \n\t * 
@param absListView the ListView to use\n\t * @param index the index of the list. E.g. Index 1 if two lists are available\n\t * @param lineIndex the line index of the View\n\t * @return the View located at a specified list line\n\t */\n\n\tprivate View getViewOnAbsListLine(AbsListView absListView, int index, int lineIndex){\n\t\tfinal long endTime = SystemClock.uptimeMillis() + Timeout.getSmallTimeout();\n\t\tView view = absListView.getChildAt(lineIndex);\n\n\t\twhile(view == null){\n\t\t\tfinal boolean timedOut = SystemClock.uptimeMillis() > endTime;\n\t\t\tif (timedOut){\n\t\t\t\tAssert.fail(\"View is null and can therefore not be clicked!\");\n\t\t\t}\n\t\t\t\n\t\t\tsleeper.sleep();\n\t\t\tabsListView = (AbsListView) viewFetcher.getIdenticalView(absListView);\n\n\t\t\tif(absListView == null){\n\t\t\t\tabsListView = waiter.waitForAndGetView(index, AbsListView.class);\n\t\t\t}\n\t\t\t\n\t\t\tview = absListView.getChildAt(lineIndex);\n\t\t}\n\t\treturn view;\n\t}\n\t\n\t/**\n\t * Returns the view in the specified item index\n\t * \n\t * @param recyclerView the RecyclerView to use\n\t * @param itemIndex the item index of the View\n\t * @return the View located at a specified item index\n\t */\n\n\tprivate View getViewOnRecyclerItemIndex(ViewGroup recyclerView, int recyclerViewIndex, int itemIndex){\n\t\tfinal long endTime = SystemClock.uptimeMillis() + Timeout.getSmallTimeout();\n\t\tView view = recyclerView.getChildAt(itemIndex);\n\n\t\twhile(view == null){\n\t\t\tfinal boolean timedOut = SystemClock.uptimeMillis() > endTime;\n\t\t\tif (timedOut){\n\t\t\t\tAssert.fail(\"View is null and can therefore not be clicked!\");\n\t\t\t}\n\n\t\t\tsleeper.sleep();\n\t\t\trecyclerView = (ViewGroup) viewFetcher.getIdenticalView(recyclerView);\n\n\t\t\tif(recyclerView == null){\n\t\t\t\trecyclerView = (ViewGroup) viewFetcher.getRecyclerView(false, recyclerViewIndex);\n\t\t\t}\n\n\t\t\tif(recyclerView != null){\n\t\t\t\tview = 
recyclerView.getChildAt(itemIndex);\n\t\t\t}\n\t\t}\n\t\treturn view;\n\t}\n\t\n\t\n}\n"}, "files_after": {"robotium-solo/src/main/java/com/robotium/solo/Clicker.java": "package com.robotium.solo;\n\nimport java.lang.reflect.Constructor;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\n\nimport junit.framework.Assert;\nimport android.app.Activity;\nimport android.app.Instrumentation;\nimport android.content.Context;\nimport android.os.SystemClock;\nimport android.util.Log;\nimport android.view.KeyEvent;\nimport android.view.MenuItem;\nimport android.view.MotionEvent;\nimport android.view.View;\nimport android.view.ViewConfiguration;\nimport android.view.ViewGroup;\nimport android.view.Window;\nimport android.widget.AbsListView;\nimport android.widget.TextView;\n\n/**\n * Contains various click methods. Examples are: clickOn(),\n * clickOnText(), clickOnScreen().\n *\n * @author Renas Reda, renas.reda@robotium.com\n *\n */\n\nclass Clicker {\n\n\tprivate final String LOG_TAG = \"Robotium\";\n\tprivate final ActivityUtils activityUtils;\n\tprivate final ViewFetcher viewFetcher;\n\tprivate final Instrumentation inst;\n\tprivate final Sender sender;\n\tprivate final Sleeper sleeper;\n\tprivate final Waiter waiter;\n\tprivate final WebUtils webUtils;\n\tprivate final DialogUtils dialogUtils;\n\tprivate final int MINI_WAIT = 300;\n\tprivate final int WAIT_TIME = 1500;\n\n\n\t/**\n\t * Constructs this object.\n\t *\n\t * @param activityUtils the {@code ActivityUtils} instance\n\t * @param viewFetcher the {@code ViewFetcher} instance\n\t * @param sender the {@code Sender} instance\n\t * @param inst the {@code android.app.Instrumentation} instance\n\t * @param sleeper the {@code Sleeper} instance\n\t * @param waiter the {@code Waiter} instance\n\t * @param webUtils the {@code WebUtils} instance\n\t * @param dialogUtils the {@code DialogUtils} instance\n\t */\n\n\tpublic Clicker(ActivityUtils activityUtils, ViewFetcher viewFetcher, Sender 
sender, Instrumentation inst, Sleeper sleeper, Waiter waiter, WebUtils webUtils, DialogUtils dialogUtils) {\n\n\t\tthis.activityUtils = activityUtils;\n\t\tthis.viewFetcher = viewFetcher;\n\t\tthis.sender = sender;\n\t\tthis.inst = inst;\n\t\tthis.sleeper = sleeper;\n\t\tthis.waiter = waiter;\n\t\tthis.webUtils = webUtils;\n\t\tthis.dialogUtils = dialogUtils;\n\t}\n\n\t/**\n\t * Clicks on a given coordinate on the screen.\n\t *\n\t * @param x the x coordinate\n\t * @param y the y coordinate\n\t */\n\n\tpublic void clickOnScreen(float x, float y, View view) {\n\t\tboolean successfull = false;\n\t\tint retry = 0;\n\t\tSecurityException ex = null;\n\n\t\twhile(!successfull && retry < 20) {\n\t\t\tlong downTime = SystemClock.uptimeMillis();\n\t\t\tlong eventTime = SystemClock.uptimeMillis();\n\t\t\tMotionEvent event = MotionEvent.obtain(downTime, eventTime,\n\t\t\t\t\tMotionEvent.ACTION_DOWN, x, y, 0);\n\t\t\tMotionEvent event2 = MotionEvent.obtain(downTime, eventTime,\n\t\t\t\t\tMotionEvent.ACTION_UP, x, y, 0);\n\t\t\ttry{\n\t\t\t\tinst.sendPointerSync(event);\n\t\t\t\tinst.sendPointerSync(event2);\n\t\t\t\tsuccessfull = true;\n\t\t\t}catch(SecurityException e){\n\t\t\t\tex = e;\n\t\t\t\tdialogUtils.hideSoftKeyboard(null, false, true);\n\t\t\t\tsleeper.sleep(MINI_WAIT);\n\t\t\t\tretry++;\n\t\t\t\tView identicalView = viewFetcher.getIdenticalView(view);\n\t\t\t\tif(identicalView != null){\n\t\t\t\t\tfloat[] xyToClick = getClickCoordinates(identicalView);\n\t\t\t\t\tx = xyToClick[0]; \n\t\t\t\t\ty = xyToClick[1];\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif(!successfull) {\n\t\t\tAssert.fail(\"Click at (\"+x+\", \"+y+\") can not be completed! (\"+(ex != null ? 
ex.getClass().getName()+\": \"+ex.getMessage() : \"null\")+\")\");\n\t\t}\n\t}\n\n\t/**\n\t * Long clicks a given coordinate on the screen.\n\t *\n\t * @param x the x coordinate\n\t * @param y the y coordinate\n\t * @param time the amount of time to long click\n\t */\n\n\tpublic void clickLongOnScreen(float x, float y, int time, View view) {\n\t\tboolean successfull = false;\n\t\tint retry = 0;\n\t\tSecurityException ex = null;\n\t\tlong downTime = SystemClock.uptimeMillis();\n\t\tlong eventTime = SystemClock.uptimeMillis();\n\t\tMotionEvent event = MotionEvent.obtain(downTime, eventTime, MotionEvent.ACTION_DOWN, x, y, 0);\n\n\t\twhile(!successfull && retry < 20) {\n\t\t\ttry{\n\t\t\t\tinst.sendPointerSync(event);\n\t\t\t\tsuccessfull = true;\n\t\t\t\tsleeper.sleep(MINI_WAIT);\n\t\t\t}catch(SecurityException e){\n\t\t\t\tex = e;\n\t\t\t\tdialogUtils.hideSoftKeyboard(null, false, true);\n\t\t\t\tsleeper.sleep(MINI_WAIT);\n\t\t\t\tretry++;\n\t\t\t\tView identicalView = viewFetcher.getIdenticalView(view);\n\t\t\t\tif(identicalView != null){\n\t\t\t\t\tfloat[] xyToClick = getClickCoordinates(identicalView);\n\t\t\t\t\tx = xyToClick[0];\n\t\t\t\t\ty = xyToClick[1];\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif(!successfull) {\n\t\t\tAssert.fail(\"Long click at (\"+x+\", \"+y+\") can not be completed! (\"+(ex != null ? 
ex.getClass().getName()+\": \"+ex.getMessage() : \"null\")+\")\");\n\t\t}\n\n\t\teventTime = SystemClock.uptimeMillis();\n\t\tevent = MotionEvent.obtain(downTime, eventTime, MotionEvent.ACTION_MOVE, x + 1.0f, y + 1.0f, 0);\n\t\tinst.sendPointerSync(event);\n\t\tif(time > 0)\n\t\t\tsleeper.sleep(time);\n\t\telse\n\t\t\tsleeper.sleep((int)(ViewConfiguration.getLongPressTimeout() * 2.5f));\n\n\t\teventTime = SystemClock.uptimeMillis();\n\t\tevent = MotionEvent.obtain(downTime, eventTime, MotionEvent.ACTION_UP, x, y, 0);\n\t\tinst.sendPointerSync(event);\n\t\tsleeper.sleep();\n\t}\n\n\n\t/**\n\t * Clicks on a given {@link View}.\n\t *\n\t * @param view the view that should be clicked\n\t */\n\n\tpublic void clickOnScreen(View view) {\n\t\tclickOnScreen(view, false, 0);\n\t}\n\n\t/**\n\t * Private method used to click on a given view.\n\t *\n\t * @param view the view that should be clicked\n\t * @param longClick true if the click should be a long click\n\t * @param time the amount of time to long click\n\t */\n\n\tpublic void clickOnScreen(View view, boolean longClick, int time) {\n\t\tif(view == null)\n\t\t\tAssert.fail(\"View is null and can therefore not be clicked!\");\n\n\t\tfloat[] xyToClick = getClickCoordinates(view);\n\t\tfloat x = xyToClick[0];\n\t\tfloat y = xyToClick[1];\n\n\t\tif(x == 0 || y == 0){\n\t\t\tsleeper.sleepMini();\n\t\t\ttry {\n\t\t\t\tview = viewFetcher.getIdenticalView(view);\n\t\t\t} catch (Exception ignored){}\n\n\t\t\tif(view != null){\n\t\t\t\txyToClick = getClickCoordinates(view);\n\t\t\t\tx = xyToClick[0];\n\t\t\t\ty = xyToClick[1];\n\t\t\t}\n\t\t}\n\n\t\tsleeper.sleep(300);\n\t\tif (longClick)\n\t\t\tclickLongOnScreen(x, y, time, view);\n\t\telse\n\t\t\tclickOnScreen(x, y, view);\n\t}\t\n\n\t/**\n\t * Returns click coordinates for the specified view.\n\t * \n\t * @param view the view to get click coordinates from\n\t * @return click coordinates for a specified view\n\t */\n\n\tprivate float[] getClickCoordinates(View view){\n\t\tint[] 
xyLocation = new int[2];\n\t\tfloat[] xyToClick = new float[2];\n\t\tint trialCount = 0;\n\n\t\tview.getLocationOnScreen(xyLocation);\n\t\twhile(xyLocation[0] == 0 && xyLocation[1] == 0 && trialCount < 10) {\n\t\t\tsleeper.sleep(300);\n\t\t\tview.getLocationOnScreen(xyLocation);\n\t\t\ttrialCount++;\n\t\t}\n\n\t\tfinal int viewWidth = view.getWidth();\n\t\tfinal int viewHeight = view.getHeight();\n\t\tfinal float x = xyLocation[0] + (viewWidth / 2.0f);\n\t\tfloat y = xyLocation[1] + (viewHeight / 2.0f);\n\n\t\txyToClick[0] = x;\n\t\txyToClick[1] = y;\n\n\t\treturn xyToClick;\n\t}\n\t\n\t\n\n\n\t/**\n\t * Long clicks on a specific {@link TextView} and then selects\n\t * an item from the context menu that appears. Will automatically scroll when needed.\n\t *\n\t * @param text the text that should be clicked on. The parameter will be interpreted as a regular expression.\n\t * @param index the index of the menu item that should be pressed\n\t */\n\n\tpublic void clickLongOnTextAndPress(String text, int index)\n\t{\n\t\tclickOnText(text, true, 0, true, 0);\n\t\tdialogUtils.waitForDialogToOpen(Timeout.getSmallTimeout(), true);\n\t\ttry{\n\t\t\tinst.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_DOWN);\n\t\t}catch(SecurityException e){\n\t\t\tAssert.fail(\"Can not press the context menu!\");\n\t\t}\n\t\tfor(int i = 0; i < index; i++)\n\t\t{\n\t\t\tsleeper.sleepMini();\n\t\t\tinst.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_DOWN);\n\t\t}\n\t\tinst.sendKeyDownUpSync(KeyEvent.KEYCODE_ENTER);\n\t}\n\n\t/**\n\t * Opens the menu and waits for it to open.\n\t */\n\n\tprivate void openMenu(){\n\t\tsleeper.sleepMini();\n\n\t\tif(!dialogUtils.waitForDialogToOpen(MINI_WAIT, false)) {\n\t\t\ttry{\n\t\t\t\tsender.sendKeyCode(KeyEvent.KEYCODE_MENU);\n\t\t\t\tdialogUtils.waitForDialogToOpen(WAIT_TIME, true);\n\t\t\t}catch(SecurityException e){\n\t\t\t\tAssert.fail(\"Can not open the menu!\");\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Clicks on a menu item with a given text.\n\t *\n\t * @param text the 
menu text that should be clicked on. The parameter will be interpreted as a regular expression.\n\t */\n\n\tpublic void clickOnMenuItem(String text)\n\t{\n\t\topenMenu();\n\t\tclickOnText(text, false, 1, true, 0);\n\t}\n\n\t/**\n\t * Clicks on a menu item with a given text.\n\t *\n\t * @param text the menu text that should be clicked on. The parameter will be interpreted as a regular expression.\n\t * @param subMenu true if the menu item could be located in a sub menu\n\t */\n\n\tpublic void clickOnMenuItem(String text, boolean subMenu)\n\t{\n\t\tsleeper.sleepMini();\n\n\t\tTextView textMore = null;\n\t\tint [] xy = new int[2];\n\t\tint x = 0;\n\t\tint y = 0;\n\n\t\tif(!dialogUtils.waitForDialogToOpen(MINI_WAIT, false)) {\n\t\t\ttry{\n\t\t\t\tsender.sendKeyCode(KeyEvent.KEYCODE_MENU);\n\t\t\t\tdialogUtils.waitForDialogToOpen(WAIT_TIME, true);\n\t\t\t}catch(SecurityException e){\n\t\t\t\tAssert.fail(\"Can not open the menu!\");\n\t\t\t}\n\t\t}\n\t\tboolean textShown = waiter.waitForText(text, 1, WAIT_TIME, true) != null;\n\n\t\tif(subMenu && (viewFetcher.getCurrentViews(TextView.class, true).size() > 5) && !textShown){\n\t\t\tfor(TextView textView : viewFetcher.getCurrentViews(TextView.class, true)){\n\t\t\t\tx = xy[0];\n\t\t\t\ty = xy[1];\n\t\t\t\ttextView.getLocationOnScreen(xy);\n\n\t\t\t\tif(xy[0] > x || xy[1] > y)\n\t\t\t\t\ttextMore = textView;\n\t\t\t}\n\t\t}\n\t\tif(textMore != null)\n\t\t\tclickOnScreen(textMore);\n\n\t\tclickOnText(text, false, 1, true, 0);\n\t}\n\n\t/**\n\t * Clicks on an ActionBar item with a given resource id\n\t *\n\t * @param resourceId the R.id of the ActionBar item\n\t */\n\n\tpublic void clickOnActionBarItem(int resourceId){\n\t\tsleeper.sleep();\n\t\tActivity activity = activityUtils.getCurrentActivity();\n\t\tif(activity != null){\n\t\t\tinst.invokeMenuActionSync(activity, resourceId, 0);\n\t\t}\n\t}\n\n\t/**\n\t * Clicks on an ActionBar Home/Up button.\n\t */\n\n\tpublic void clickOnActionBarHomeButton() {\n\t\tActivity activity 
= activityUtils.getCurrentActivity();\n\t\tMenuItem homeMenuItem = null;\n\n\t\ttry {\n\t\t\tClass> cls = Class.forName(\"com.android.internal.view.menu.ActionMenuItem\");\n\t\t\tClass> partypes[] = new Class[6];\n\t\t\tpartypes[0] = Context.class;\n\t\t\tpartypes[1] = Integer.TYPE;\n\t\t\tpartypes[2] = Integer.TYPE;\n\t\t\tpartypes[3] = Integer.TYPE;\n\t\t\tpartypes[4] = Integer.TYPE;\n\t\t\tpartypes[5] = CharSequence.class;\n\t\t\tConstructor> ct = cls.getConstructor(partypes);\n\t\t\tObject argList[] = new Object[6];\n\t\t\targList[0] = activity;\n\t\t\targList[1] = 0;\n\t\t\targList[2] = android.R.id.home;\n\t\t\targList[3] = 0;\n\t\t\targList[4] = 0;\n\t\t\targList[5] = \"\";\n\t\t\thomeMenuItem = (MenuItem) ct.newInstance(argList);\n\t\t} catch (Exception ex) {\n\t\t\tLog.d(LOG_TAG, \"Can not find methods to invoke Home button!\");\n\t\t}\n\n\t\tif (homeMenuItem != null) {\n\t\t\ttry{\n\t\t\t\tactivity.getWindow().getCallback().onMenuItemSelected(Window.FEATURE_OPTIONS_PANEL, homeMenuItem);\n\t\t\t}catch(Exception ignored) {}\n\t\t}\n\t}\n\n\t/**\n\t * Clicks on a web element using the given By method.\n\t *\n\t * @param by the By object e.g. 
By.id(\"id\");\n\t * @param match if multiple objects match, this determines which one will be clicked\n\t * @param scroll true if scrolling should be performed\n\t * @param useJavaScriptToClick true if click should be perfomed through JavaScript\n\t */\n\n\tpublic void clickOnWebElement(By by, int match, boolean scroll, boolean useJavaScriptToClick){\n\t\tWebElement webElement = null;\n\t\t\n\t\tif(useJavaScriptToClick){\n\t\t\twebElement = waiter.waitForWebElement(by, match, Timeout.getSmallTimeout(), false);\n\t\t\tif(webElement == null){\n\t\t\t\tAssert.fail(\"WebElement with \" + webUtils.splitNameByUpperCase(by.getClass().getSimpleName()) + \": '\" + by.getValue() + \"' is not found!\");\n\t\t\t}\n\t\t\twebUtils.executeJavaScript(by, true);\n\t\t\treturn;\n\t\t}\n\t\t\n\t\tWebElement webElementToClick = waiter.waitForWebElement(by, match, Timeout.getSmallTimeout(), scroll);\n\t\t\n\t\tif(webElementToClick == null){\n\t\t\tif(match > 1) {\n\t\t\t\tAssert.fail(match + \" WebElements with \" + webUtils.splitNameByUpperCase(by.getClass().getSimpleName()) + \": '\" + by.getValue() + \"' are not found!\");\n\t\t\t}\n\t\t\telse {\n\t\t\t\tAssert.fail(\"WebElement with \" + webUtils.splitNameByUpperCase(by.getClass().getSimpleName()) + \": '\" + by.getValue() + \"' is not found!\");\n\t\t\t}\n\t\t}\n\t\t\n\t\tclickOnScreen(webElementToClick.getLocationX(), webElementToClick.getLocationY(), null);\n\t}\n\n\n\t/**\n\t * Clicks on a specific {@link TextView} displaying a given text.\n\t *\n\t * @param regex the text that should be clicked on. 
The parameter will be interpreted as a regular expression.\n\t * @param longClick {@code true} if the click should be a long click\n\t * @param match the regex match that should be clicked on\n\t * @param scroll true if scrolling should be performed\n\t * @param time the amount of time to long click\n\t */\n\n\tpublic void clickOnText(String regex, boolean longClick, int match, boolean scroll, int time) {\n\t\tTextView textToClick = waiter.waitForText(regex, match, Timeout.getSmallTimeout(), scroll, true, false);\n\n\t\tif (textToClick != null) {\n\t\t\tclickOnScreen(textToClick, longClick, time);\n\t\t}\n\n\t\telse {\n\n\t\t\tif(match > 1){\n\t\t\t\tAssert.fail(match + \" matches of text string: '\" + regex + \"' are not found!\");\n\t\t\t}\n\n\t\t\telse{\n\t\t\t\tArrayList allTextViews = RobotiumUtils.removeInvisibleViews(viewFetcher.getCurrentViews(TextView.class, true));\n\t\t\t\tallTextViews.addAll((Collection extends TextView>) webUtils.getTextViewsFromWebView());\n\n\t\t\t\tfor (TextView textView : allTextViews) {\n\t\t\t\t\tLog.d(LOG_TAG, \"'\" + regex + \"' not found. Have found: '\" + textView.getText() + \"'\");\n\t\t\t\t}\n\t\t\t\tallTextViews = null;\n\t\t\t\tAssert.fail(\"Text string: '\" + regex + \"' is not found!\");\n\t\t\t}\n\t\t}\n\t}\n\n\n\t/**\n\t * Clicks on a {@code View} of a specific class, with a given text.\n\t *\n\t * @param viewClass what kind of {@code View} to click, e.g. {@code Button.class} or {@code TextView.class}\n\t * @param nameRegex the name of the view presented to the user. 
The parameter will be interpreted as a regular expression.\n\t */\n\n\tpublic void clickOn(Class viewClass, String nameRegex) {\n\t\tT viewToClick = (T) waiter.waitForText(viewClass, nameRegex, 0, Timeout.getSmallTimeout(), true, true, false);\n\n\t\tif (viewToClick != null) {\n\t\t\tclickOnScreen(viewToClick);\n\t\t} else {\n\t\t\tArrayList allTextViews = RobotiumUtils.removeInvisibleViews(viewFetcher.getCurrentViews(viewClass, true));\n\n\t\t\tfor (T view : allTextViews) {\n\t\t\t\tLog.d(LOG_TAG, \"'\" + nameRegex + \"' not found. Have found: '\" + view.getText() + \"'\");\n\t\t\t}\n\t\t\tAssert.fail(viewClass.getSimpleName() + \" with text: '\" + nameRegex + \"' is not found!\");\n\t\t}\n\t}\n\n\t/**\n\t * Clicks on a {@code View} of a specific class, with a certain index.\n\t *\n\t * @param viewClass what kind of {@code View} to click, e.g. {@code Button.class} or {@code ImageView.class}\n\t * @param index the index of the {@code View} to be clicked, within {@code View}s of the specified class\n\t */\n\n\tpublic void clickOn(Class viewClass, int index) {\n\t\tclickOnScreen(waiter.waitForAndGetView(index, viewClass));\n\t}\n\n\n\t/**\n\t * Clicks on a certain list line and returns the {@link TextView}s that\n\t * the list line is showing. 
Will use the first list it finds.\n\t *\n\t * @param line the line that should be clicked\n\t * @return a {@code List} of the {@code TextView}s located in the list line\n\t */\n\n\tpublic ArrayList clickInList(int line) {\n\t\treturn clickInList(line, 0, 0, false, 0);\n\t}\n\t\n\t/**\n\t * Clicks on a View with a specified resource id located in a specified list line\n\t *\n\t * @param line the line where the View is located\n\t * @param id the resource id of the View\n\t */\n\n\tpublic void clickInList(int line, int id) {\n\t\tclickInList(line, 0, id, false, 0);\n\t}\n\n\t/**\n\t * Clicks on a certain list line on a specified List and\n\t * returns the {@link TextView}s that the list line is showing.\n\t *\n\t * @param line the line that should be clicked\n\t * @param index the index of the list. E.g. Index 1 if two lists are available\n\t * @param id the resource id of the View to click\n\t * @return an {@code ArrayList} of the {@code TextView}s located in the list line\n\t */\n\n\tpublic ArrayList clickInList(int line, int index, int id, boolean longClick, int time) {\n\t\tfinal long endTime = SystemClock.uptimeMillis() + Timeout.getSmallTimeout();\n\n\t\tint lineIndex = line - 1;\n\t\tif(lineIndex < 0)\n\t\t\tlineIndex = 0;\n\n\t\tArrayList views = new ArrayList();\n\t\tfinal AbsListView absListView = waiter.waitForAndGetView(index, AbsListView.class);\n\n\t\tif(absListView == null)\n\t\t\tAssert.fail(\"AbsListView is null!\");\n\n\t\tfailIfIndexHigherThenChildCount(absListView, lineIndex, endTime);\n\n\t\tView viewOnLine = getViewOnAbsListLine(absListView, index, lineIndex);\n\n\t\tif(viewOnLine != null){\n\t\t\tviews = viewFetcher.getViews(viewOnLine, true);\n\t\t\tviews = RobotiumUtils.removeInvisibleViews(views);\n\n\t\t\tif(id == 0){\n\t\t\t\tclickOnScreen(viewOnLine, longClick, time);\n\t\t\t}\n\t\t\telse{\n\t\t\t\tclickOnScreen(getView(id, views));\n\t\t\t}\n\t\t}\n\t\treturn RobotiumUtils.filterViews(TextView.class, views);\n\t}\n\t\n\t/**\n\t * Clicks 
on a certain list line and returns the {@link TextView}s that\n\t * the list line is showing. Will use the first list it finds.\n\t *\n\t * @param line the line that should be clicked\n\t * @return a {@code List} of the {@code TextView}s located in the list line\n\t */\n\n\tpublic ArrayList clickInRecyclerView(int line) {\n\t\treturn clickInRecyclerView(line, 0, 0, false, 0);\n\t}\n\t\n\t/**\n\t * Clicks on a View with a specified resource id located in a specified RecyclerView itemIndex\n\t *\n\t * @param itemIndex the index where the View is located\n\t * @param id the resource id of the View\n\t */\n\n\tpublic void clickInRecyclerView(int itemIndex, int id) {\n\t\tclickInRecyclerView(itemIndex, 0, id, false, 0);\n\t}\n\n\t\n\t/**\n\t * Clicks on a certain list line on a specified List and\n\t * returns the {@link TextView}s that the list line is showing.\n\t *\n\t * @param itemIndex the item index that should be clicked\n\t * @param recyclerViewIndex the index of the RecyclerView. E.g. 
Index 1 if two RecyclerViews are available\n\t * @param id the resource id of the View to click\n\t * @return an {@code ArrayList} of the {@code TextView}s located in the list line\n\t */\n\n\tpublic ArrayList clickInRecyclerView(int itemIndex, int recyclerViewIndex, int id, boolean longClick, int time) {\n\t\tView viewOnLine = null;\n\t\tfinal long endTime = SystemClock.uptimeMillis() + Timeout.getSmallTimeout();\n\n\t\tif(itemIndex < 0)\n\t\t\titemIndex = 0;\n\n\t\tArrayList views = new ArrayList();\n\t\tViewGroup recyclerView = viewFetcher.getRecyclerView(recyclerViewIndex, Timeout.getSmallTimeout());\n\t\t\n\t\tif(recyclerView == null){\n\t\t\tAssert.fail(\"RecyclerView is not found!\");\n\t\t}\n\t\telse{\n\t\t\tfailIfIndexHigherThenChildCount(recyclerView, itemIndex, endTime);\n\t\t\tviewOnLine = getViewOnRecyclerItemIndex((ViewGroup) recyclerView, recyclerViewIndex, itemIndex);\n\t\t}\n\t\t\n\t\tif(viewOnLine != null){\n\t\t\tviews = viewFetcher.getViews(viewOnLine, true);\n\t\t\tviews = RobotiumUtils.removeInvisibleViews(views);\n\t\t\t\n\t\t\tif(id == 0){\n\t\t\t\tclickOnScreen(viewOnLine, longClick, time);\n\t\t\t}\n\t\t\telse{\n\t\t\t\tclickOnScreen(getView(id, views));\n\t\t\t}\n\t\t}\n\t\treturn RobotiumUtils.filterViews(TextView.class, views);\n\t}\n\t\n\tprivate View getView(int id, List views){\n\t\tfor(View view : views){\n\t\t\tif(id == view.getId()){\n\t\t\t\treturn view;\n\t\t\t}\n\t\t}\n\t\treturn null;\n\t}\n\t\n\tprivate void failIfIndexHigherThenChildCount(ViewGroup viewGroup, int index, long endTime){\n\t\twhile(index > viewGroup.getChildCount()){\n\t\t\tfinal boolean timedOut = SystemClock.uptimeMillis() > endTime;\n\t\t\tif (timedOut){\n\t\t\t\tint numberOfIndexes = viewGroup.getChildCount();\n\t\t\t\tAssert.fail(\"Can not click on index \" + index + \" as there are only \" + numberOfIndexes + \" indexes available\");\n\t\t\t}\n\t\t\tsleeper.sleep();\n\t\t}\n\t}\n\t\n\n\t/**\n\t * Returns the view in the specified list line\n\t * \n\t * 
@param absListView the ListView to use\n\t * @param index the index of the list. E.g. Index 1 if two lists are available\n\t * @param lineIndex the line index of the View\n\t * @return the View located at a specified list line\n\t */\n\n\tprivate View getViewOnAbsListLine(AbsListView absListView, int index, int lineIndex){\n\t\tfinal long endTime = SystemClock.uptimeMillis() + Timeout.getSmallTimeout();\n\t\tView view = absListView.getChildAt(lineIndex);\n\n\t\twhile(view == null){\n\t\t\tfinal boolean timedOut = SystemClock.uptimeMillis() > endTime;\n\t\t\tif (timedOut){\n\t\t\t\tAssert.fail(\"View is null and can therefore not be clicked!\");\n\t\t\t}\n\t\t\t\n\t\t\tsleeper.sleep();\n\t\t\tabsListView = (AbsListView) viewFetcher.getIdenticalView(absListView);\n\n\t\t\tif(absListView == null){\n\t\t\t\tabsListView = waiter.waitForAndGetView(index, AbsListView.class);\n\t\t\t}\n\t\t\t\n\t\t\tview = absListView.getChildAt(lineIndex);\n\t\t}\n\t\treturn view;\n\t}\n\t\n\t/**\n\t * Returns the view in the specified item index\n\t * \n\t * @param recyclerView the RecyclerView to use\n\t * @param itemIndex the item index of the View\n\t * @return the View located at a specified item index\n\t */\n\n\tprivate View getViewOnRecyclerItemIndex(ViewGroup recyclerView, int recyclerViewIndex, int itemIndex){\n\t\tfinal long endTime = SystemClock.uptimeMillis() + Timeout.getSmallTimeout();\n\t\tView view = recyclerView.getChildAt(itemIndex);\n\n\t\twhile(view == null){\n\t\t\tfinal boolean timedOut = SystemClock.uptimeMillis() > endTime;\n\t\t\tif (timedOut){\n\t\t\t\tAssert.fail(\"View is null and can therefore not be clicked!\");\n\t\t\t}\n\n\t\t\tsleeper.sleep();\n\t\t\trecyclerView = (ViewGroup) viewFetcher.getIdenticalView(recyclerView);\n\n\t\t\tif(recyclerView == null){\n\t\t\t\trecyclerView = (ViewGroup) viewFetcher.getRecyclerView(false, recyclerViewIndex);\n\t\t\t}\n\n\t\t\tif(recyclerView != null){\n\t\t\t\tview = 
recyclerView.getChildAt(itemIndex);\n\t\t\t}\n\t\t}\n\t\treturn view;\n\t}\n\t\n\t\n}\n"}}
-{"repo": "ivanacostarubio/bartender", "pr_number": 2, "title": "1140 Grid added", "state": "closed", "merged_at": "2012-09-13T23:37:06Z", "additions": 1373, "deletions": 47, "files_changed": ["app.rb", "public/js/css3-mediaqueries.js", "public/stylesheets/1140.css", "public/stylesheets/grid-styles.css", "public/stylesheets/ie.css", "public/stylesheets/styles.css"], "files_before": {"app.rb": "require 'rubygems'\nrequire 'sinatra/base'\nrequire 'slim'\nrequire 'sass'\nrequire 'mongoid'\n\nMongoid.load!(\"config/mongoid.yml\")\n\nSlim::Engine.set_default_options :sections => false\n\nclass App < Sinatra::Base\n\n set :public, File.join(File.dirname(__FILE__), 'public')\n set :views, File.join(File.dirname(__FILE__), 'views')\n\n helpers do\n def partial(page, options={})\n haml page, options.merge!(:layout => false)\n end\n end\n\n\n get('/') do \n slim :index\n end\n\n get('/styles') do \n slim :styles\n end\n\nend\n", "public/stylesheets/styles.css": "/*\nSyntax error: Invalid CSS after \"...nge: background\": expected selector or at-rule, was \": $orange; /* f...\"\n on line 11 of public/stylesheets/styles.scss\n\n6: $vine: #ac3900;\n7: $white: #ffffff;\n8: $black: #000000;\n9: $lightgrey: #e6e5e5;\n10: $textgrey: #959494;\n11: $gradientorange: background: $orange; /* for non-css3 browsers */\n12: filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='$orange', endColorstr='$orangered'); /* for IE */\n13: background: -webkit-gradient(linear, left top, left bottom, from($orange), to($orangered)); /* for webkit browsers */\n14: background: -moz-linear-gradient(top, $orange, $orangered); /* for firefox 3.6+ */ \n15: ;\n16: \n\nBacktrace:\npublic/stylesheets/styles.scss:11\n/Users/bellatrix/.rvm/gems/ruby-1.9.3-p0@bartender/gems/sass-3.2.1/lib/sass/scss/parser.rb:1130:in `expected'\n/Users/bellatrix/.rvm/gems/ruby-1.9.3-p0@bartender/gems/sass-3.2.1/lib/sass/scss/parser.rb:1066:in 
`expected'\n/Users/bellatrix/.rvm/gems/ruby-1.9.3-p0@bartender/gems/sass-3.2.1/lib/sass/scss/parser.rb:28:in `parse'\n/Users/bellatrix/.rvm/gems/ruby-1.9.3-p0@bartender/gems/sass-3.2.1/lib/sass/engine.rb:342:in `_to_tree'\n/Users/bellatrix/.rvm/gems/ruby-1.9.3-p0@bartender/gems/sass-3.2.1/lib/sass/engine.rb:315:in `_render'\n/Users/bellatrix/.rvm/gems/ruby-1.9.3-p0@bartender/gems/sass-3.2.1/lib/sass/engine.rb:262:in `render'\n/Users/bellatrix/.rvm/gems/ruby-1.9.3-p0@bartender/gems/sass-3.2.1/lib/sass/plugin/compiler.rb:340:in `update_stylesheet'\n/Users/bellatrix/.rvm/gems/ruby-1.9.3-p0@bartender/gems/sass-3.2.1/lib/sass/plugin/compiler.rb:202:in `block in update_stylesheets'\n/Users/bellatrix/.rvm/gems/ruby-1.9.3-p0@bartender/gems/sass-3.2.1/lib/sass/plugin/compiler.rb:200:in `each'\n/Users/bellatrix/.rvm/gems/ruby-1.9.3-p0@bartender/gems/sass-3.2.1/lib/sass/plugin/compiler.rb:200:in `update_stylesheets'\n/Users/bellatrix/.rvm/gems/ruby-1.9.3-p0@bartender/gems/sass-3.2.1/lib/sass/plugin/compiler.rb:298:in `block in watch'\n/Users/bellatrix/.rvm/gems/ruby-1.9.3-p0@bartender/gems/sass-3.2.1/vendor/listen/lib/listen/multi_listener.rb:86:in `call'\n/Users/bellatrix/.rvm/gems/ruby-1.9.3-p0@bartender/gems/sass-3.2.1/vendor/listen/lib/listen/multi_listener.rb:86:in `on_change'\n/Users/bellatrix/.rvm/gems/ruby-1.9.3-p0@bartender/gems/sass-3.2.1/vendor/listen/lib/listen/multi_listener.rb:95:in `block in initialize_adapter'\n/Users/bellatrix/.rvm/gems/ruby-1.9.3-p0@bartender/gems/sass-3.2.1/vendor/listen/lib/listen/adapters/polling.rb:55:in `call'\n/Users/bellatrix/.rvm/gems/ruby-1.9.3-p0@bartender/gems/sass-3.2.1/vendor/listen/lib/listen/adapters/polling.rb:55:in `poll'\n/Users/bellatrix/.rvm/gems/ruby-1.9.3-p0@bartender/gems/sass-3.2.1/vendor/listen/lib/listen/adapters/polling.rb:31:in `block in start'\n*/\nbody:before {\n white-space: pre;\n font-family: monospace;\n content: \"Syntax error: Invalid CSS after \\\"...nge: background\\\": expected selector or at-rule, was 
\\\": $orange; /* f...\\\"\\A on line 11 of public/stylesheets/styles.scss\\A \\A 6: $vine: #ac3900;\\A 7: $white: #ffffff;\\A 8: $black: #000000;\\A 9: $lightgrey: #e6e5e5;\\A 10: $textgrey: #959494;\\A 11: $gradientorange: background: $orange; /* for non-css3 browsers */\\A 12: filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='$orange', endColorstr='$orangered'); /* for IE */\\A 13: background: -webkit-gradient(linear, left top, left bottom, from($orange), to($orangered)); /* for webkit browsers */\\A 14: background: -moz-linear-gradient(top, $orange, $orangered); /* for firefox 3.6+ */ \\A 15: ;\\A 16: \"; }\n"}, "files_after": {"app.rb": "require 'rubygems'\nrequire 'sinatra/base'\nrequire 'slim'\nrequire 'sass'\nrequire 'mongoid'\n\nMongoid.load!(\"config/mongoid.yml\")\n\nSlim::Engine.set_default_options :sections => false\n\nclass App < Sinatra::Base\n\n set :public, File.join(File.dirname(__FILE__), 'public')\n set :views, File.join(File.dirname(__FILE__), 'views')\n\n helpers do\n def partial(page, options={})\n haml page, options.merge!(:layout => false)\n end\n end\n\n\n get('/') do \n slim :index\n end\n\n get('/styles') do \n slim :styles\n end\n \n get('/grid') do \n slim :grid\n end\n \n\nend\n", "public/js/css3-mediaqueries.js": "if(typeof Object.create!==\"function\"){\nObject.create=function(o){\nfunction F(){\n};\nF.prototype=o;\nreturn new F();\n};\n}\nvar ua={toString:function(){\nreturn navigator.userAgent;\n},test:function(s){\nreturn this.toString().toLowerCase().indexOf(s.toLowerCase())>-1;\n}};\nua.version=(ua.toString().toLowerCase().match(/[\\s\\S]+(?:rv|it|ra|ie)[\\/: ]([\\d.]+)/)||[])[1];\nua.webkit=ua.test(\"webkit\");\nua.gecko=ua.test(\"gecko\")&&!ua.webkit;\nua.opera=ua.test(\"opera\");\nua.ie=ua.test(\"msie\")&&!ua.opera;\nua.ie6=ua.ie&&document.compatMode&&typeof document.documentElement.style.maxHeight===\"undefined\";\nua.ie7=ua.ie&&document.documentElement&&typeof 
document.documentElement.style.maxHeight!==\"undefined\"&&typeof XDomainRequest===\"undefined\";\nua.ie8=ua.ie&&typeof XDomainRequest!==\"undefined\";\nvar domReady=function(){\nvar _1=[];\nvar _2=function(){\nif(!arguments.callee.done){\narguments.callee.done=true;\nfor(var i=0;i<_1.length;i++){\n_1[i]();\n}\n}\n};\nif(document.addEventListener){\ndocument.addEventListener(\"DOMContentLoaded\",_2,false);\n}\nif(ua.ie){\n(function(){\ntry{\ndocument.documentElement.doScroll(\"left\");\n}\ncatch(e){\nsetTimeout(arguments.callee,50);\nreturn;\n}\n_2();\n})();\ndocument.onreadystatechange=function(){\nif(document.readyState===\"complete\"){\ndocument.onreadystatechange=null;\n_2();\n}\n};\n}\nif(ua.webkit&&document.readyState){\n(function(){\nif(document.readyState!==\"loading\"){\n_2();\n}else{\nsetTimeout(arguments.callee,10);\n}\n})();\n}\nwindow.onload=_2;\nreturn function(fn){\nif(typeof fn===\"function\"){\n_1[_1.length]=fn;\n}\nreturn fn;\n};\n}();\nvar cssHelper=function(){\nvar _3={BLOCKS:/[^\\s{][^{]*\\{(?:[^{}]*\\{[^{}]*\\}[^{}]*|[^{}]*)*\\}/g,BLOCKS_INSIDE:/[^\\s{][^{]*\\{[^{}]*\\}/g,DECLARATIONS:/[a-zA-Z\\-]+[^;]*:[^;]+;/g,RELATIVE_URLS:/url\\(['\"]?([^\\/\\)'\"][^:\\)'\"]+)['\"]?\\)/g,REDUNDANT_COMPONENTS:/(?:\\/\\*([^*\\\\\\\\]|\\*(?!\\/))+\\*\\/|@import[^;]+;)/g,REDUNDANT_WHITESPACE:/\\s*(,|:|;|\\{|\\})\\s*/g,MORE_WHITESPACE:/\\s{2,}/g,FINAL_SEMICOLONS:/;\\}/g,NOT_WHITESPACE:/\\S+/g};\nvar _4,_5=false;\nvar _6=[];\nvar _7=function(fn){\nif(typeof fn===\"function\"){\n_6[_6.length]=fn;\n}\n};\nvar _8=function(){\nfor(var i=0;i<_6.length;i++){\n_6[i](_4);\n}\n};\nvar _9={};\nvar _a=function(n,v){\nif(_9[n]){\nvar _b=_9[n].listeners;\nif(_b){\nfor(var i=0;i<_b.length;i++){\n_b[i](v);\n}\n}\n}\n};\nvar _c=function(_d,_e,_f){\nif(ua.ie&&!window.XMLHttpRequest){\nwindow.XMLHttpRequest=function(){\nreturn new ActiveXObject(\"Microsoft.XMLHTTP\");\n};\n}\nif(!XMLHttpRequest){\nreturn \"\";\n}\nvar r=new 
XMLHttpRequest();\ntry{\nr.open(\"get\",_d,true);\nr.setRequestHeader(\"X_REQUESTED_WITH\",\"XMLHttpRequest\");\n}\ncatch(e){\n_f();\nreturn;\n}\nvar _10=false;\nsetTimeout(function(){\n_10=true;\n},5000);\ndocument.documentElement.style.cursor=\"progress\";\nr.onreadystatechange=function(){\nif(r.readyState===4&&!_10){\nif(!r.status&&location.protocol===\"file:\"||(r.status>=200&&r.status<300)||r.status===304||navigator.userAgent.indexOf(\"Safari\")>-1&&typeof r.status===\"undefined\"){\n_e(r.responseText);\n}else{\n_f();\n}\ndocument.documentElement.style.cursor=\"\";\nr=null;\n}\n};\nr.send(\"\");\n};\nvar _11=function(_12){\n_12=_12.replace(_3.REDUNDANT_COMPONENTS,\"\");\n_12=_12.replace(_3.REDUNDANT_WHITESPACE,\"$1\");\n_12=_12.replace(_3.MORE_WHITESPACE,\" \");\n_12=_12.replace(_3.FINAL_SEMICOLONS,\"}\");\nreturn _12;\n};\nvar _13={mediaQueryList:function(s){\nvar o={};\nvar idx=s.indexOf(\"{\");\nvar lt=s.substring(0,idx);\ns=s.substring(idx+1,s.length-1);\nvar mqs=[],rs=[];\nvar qts=lt.toLowerCase().substring(7).split(\",\");\nfor(var i=0;i-1&&_23.href&&_23.href.length!==0&&!_23.disabled){\n_1f[_1f.length]=_23;\n}\n}\nif(_1f.length>0){\nvar c=0;\nvar _24=function(){\nc++;\nif(c===_1f.length){\n_20();\n}\n};\nvar _25=function(_26){\nvar _27=_26.href;\n_c(_27,function(_28){\n_28=_11(_28).replace(_3.RELATIVE_URLS,\"url(\"+_27.substring(0,_27.lastIndexOf(\"/\"))+\"/$1)\");\n_26.cssHelperText=_28;\n_24();\n},_24);\n};\nfor(i=0;i<_1f.length;i++){\n_25(_1f[i]);\n}\n}else{\n_20();\n}\n};\nvar _29={mediaQueryLists:\"array\",rules:\"array\",selectors:\"object\",declarations:\"array\",properties:\"object\"};\nvar _2a={mediaQueryLists:null,rules:null,selectors:null,declarations:null,properties:null};\nvar _2b=function(_2c,v){\nif(_2a[_2c]!==null){\nif(_29[_2c]===\"array\"){\nreturn (_2a[_2c]=_2a[_2c].concat(v));\n}else{\nvar c=_2a[_2c];\nfor(var n in v){\nif(v.hasOwnProperty(n)){\nif(!c[n]){\nc[n]=v[n];\n}else{\nc[n]=c[n].concat(v[n]);\n}\n}\n}\nreturn 
c;\n}\n}\n};\nvar _2d=function(_2e){\n_2a[_2e]=(_29[_2e]===\"array\")?[]:{};\nfor(var i=0;i<_4.length;i++){\n_2b(_2e,_4[i].cssHelperParsed[_2e]);\n}\nreturn _2a[_2e];\n};\ndomReady(function(){\nvar els=document.body.getElementsByTagName(\"*\");\nfor(var i=0;i=_44)||(max&&_46<_44)||(!min&&!max&&_46===_44));\n}else{\nreturn false;\n}\n}else{\nreturn _46>0;\n}\n}else{\nif(\"device-height\"===_41.substring(l-13,l)){\n_47=screen.height;\nif(_42!==null){\nif(_43===\"length\"){\nreturn ((min&&_47>=_44)||(max&&_47<_44)||(!min&&!max&&_47===_44));\n}else{\nreturn false;\n}\n}else{\nreturn _47>0;\n}\n}else{\nif(\"width\"===_41.substring(l-5,l)){\n_46=document.documentElement.clientWidth||document.body.clientWidth;\nif(_42!==null){\nif(_43===\"length\"){\nreturn ((min&&_46>=_44)||(max&&_46<_44)||(!min&&!max&&_46===_44));\n}else{\nreturn false;\n}\n}else{\nreturn _46>0;\n}\n}else{\nif(\"height\"===_41.substring(l-6,l)){\n_47=document.documentElement.clientHeight||document.body.clientHeight;\nif(_42!==null){\nif(_43===\"length\"){\nreturn ((min&&_47>=_44)||(max&&_47<_44)||(!min&&!max&&_47===_44));\n}else{\nreturn false;\n}\n}else{\nreturn _47>0;\n}\n}else{\nif(\"device-aspect-ratio\"===_41.substring(l-19,l)){\nreturn _43===\"aspect-ratio\"&&screen.width*_44[1]===screen.height*_44[0];\n}else{\nif(\"color-index\"===_41.substring(l-11,l)){\nvar _48=Math.pow(2,screen.colorDepth);\nif(_42!==null){\nif(_43===\"absolute\"){\nreturn ((min&&_48>=_44)||(max&&_48<_44)||(!min&&!max&&_48===_44));\n}else{\nreturn false;\n}\n}else{\nreturn _48>0;\n}\n}else{\nif(\"color\"===_41.substring(l-5,l)){\nvar _49=screen.colorDepth;\nif(_42!==null){\nif(_43===\"absolute\"){\nreturn ((min&&_49>=_44)||(max&&_49<_44)||(!min&&!max&&_49===_44));\n}else{\nreturn false;\n}\n}else{\nreturn _49>0;\n}\n}else{\nif(\"resolution\"===_41.substring(l-10,l)){\nvar res;\nif(_45===\"dpcm\"){\nres=_3d(\"1cm\");\n}else{\nres=_3d(\"1in\");\n}\nif(_42!==null){\nif(_43===\"resolution\"){\nreturn 
((min&&res>=_44)||(max&&res<_44)||(!min&&!max&&res===_44));\n}else{\nreturn false;\n}\n}else{\nreturn res>0;\n}\n}else{\nreturn false;\n}\n}\n}\n}\n}\n}\n}\n}\n};\nvar _4a=function(mq){\nvar _4b=mq.getValid();\nvar _4c=mq.getExpressions();\nvar l=_4c.length;\nif(l>0){\nfor(var i=0;i0){\ns[c++]=\",\";\n}\ns[c++]=n;\n}\n}\nif(s.length>0){\n_39[_39.length]=cssHelper.addStyle(\"@media \"+s.join(\"\")+\"{\"+mql.getCssText()+\"}\",false);\n}\n};\nvar _4e=function(_4f){\nfor(var i=0;i<_4f.length;i++){\n_4d(_4f[i]);\n}\nif(ua.ie){\ndocument.documentElement.style.display=\"block\";\nsetTimeout(function(){\ndocument.documentElement.style.display=\"\";\n},0);\nsetTimeout(function(){\ncssHelper.broadcast(\"cssMediaQueriesTested\");\n},100);\n}else{\ncssHelper.broadcast(\"cssMediaQueriesTested\");\n}\n};\nvar _50=function(){\nfor(var i=0;i<_39.length;i++){\ncssHelper.removeStyle(_39[i]);\n}\n_39=[];\ncssHelper.mediaQueryLists(_4e);\n};\nvar _51=0;\nvar _52=function(){\nvar _53=cssHelper.getViewportWidth();\nvar _54=cssHelper.getViewportHeight();\nif(ua.ie){\nvar el=document.createElement(\"div\");\nel.style.position=\"absolute\";\nel.style.top=\"-9999em\";\nel.style.overflow=\"scroll\";\ndocument.body.appendChild(el);\n_51=el.offsetWidth-el.clientWidth;\ndocument.body.removeChild(el);\n}\nvar _55;\nvar _56=function(){\nvar vpw=cssHelper.getViewportWidth();\nvar vph=cssHelper.getViewportHeight();\nif(Math.abs(vpw-_53)>_51||Math.abs(vph-_54)>_51){\n_53=vpw;\n_54=vph;\nclearTimeout(_55);\n_55=setTimeout(function(){\nif(!_3a()){\n_50();\n}else{\ncssHelper.broadcast(\"cssMediaQueriesTested\");\n}\n},500);\n}\n};\nwindow.onresize=function(){\nvar x=window.onresize||function(){\n};\nreturn function(){\nx();\n_56();\n};\n}();\n};\nvar _57=document.documentElement;\n_57.style.marginLeft=\"-32767px\";\nsetTimeout(function(){\n_57.style.marginTop=\"\";\n},20000);\nreturn 
function(){\nif(!_3a()){\ncssHelper.addListener(\"newStyleParsed\",function(el){\n_4e(el.cssHelperParsed.mediaQueryLists);\n});\ncssHelper.addListener(\"cssMediaQueriesTested\",function(){\nif(ua.ie){\n_57.style.width=\"1px\";\n}\nsetTimeout(function(){\n_57.style.width=\"\";\n_57.style.marginLeft=\"\";\n},0);\ncssHelper.removeListener(\"cssMediaQueriesTested\",arguments.callee);\n});\n_3c();\n_50();\n}else{\n_57.style.marginLeft=\"\";\n}\n_52();\n};\n}());\ntry{\ndocument.execCommand(\"BackgroundImageCache\",false,true);\n}\ncatch(e){\n}\n\r\n", "public/stylesheets/1140.css": "/* CSS Resets */\n\nhtml,body,div,span,object,iframe,h1,h2,h3,h4,h5,h6,p,blockquote,pre,a,abbr,address,cite,code,del,dfn,em,img,ins,q,small,strong,sub,sup,dl,dt,dd,ol,ul,li,fieldset,form,label,legend,table,caption,tbody,tfoot,thead,tr,th,td{border:0;margin:0;padding:0}article,aside,figure,figure img,figcaption,hgroup,footer,header,nav,section,video,object{display:block}a img{border:0}figure{position:relative}figure img{width:100%}\n\n\n/* ==================================================================================================================== */\n/* ! 
The 1140px Grid V2 by Andy Taylor \\ http://cssgrid.net \\ http://www.twitter.com/andytlr \\ http://www.andytlr.com */\n/* ==================================================================================================================== */\n\n.container {\npadding-left: 20px;\npadding-right: 20px;\n}\n\n.row {\nwidth: 100%;\nmax-width: 1140px;\nmin-width: 755px;\nmargin: 0 auto;\noverflow: hidden;\n}\n\n.onecol, .twocol, .threecol, .fourcol, .fivecol, .sixcol, .sevencol, .eightcol, .ninecol, .tencol, .elevencol {\nmargin-right: 3.8%;\nfloat: left;\nmin-height: 1px;\n}\n\n.row .onecol {\nwidth: 4.85%;\n}\n\n.row .twocol {\nwidth: 13.45%;\n}\n\n.row .threecol {\nwidth: 22.05%;\n}\n\n.row .fourcol {\nwidth: 30.75%;\n}\n\n.row .fivecol {\nwidth: 39.45%;\n}\n\n.row .sixcol {\nwidth: 48%;\n}\n\n.row .sevencol {\nwidth: 56.75%;\n}\n\n.row .eightcol {\nwidth: 65.4%;\n}\n\n.row .ninecol {\nwidth: 74.05%;\n}\n\n.row .tencol {\nwidth: 82.7%;\n}\n\n.row .elevencol {\nwidth: 91.35%;\n}\n\n.row .twelvecol {\nwidth: 100%;\nfloat: left;\n}\n\n.last {\nmargin-right: 0px;\n}\n\nimg, object, embed {\nmax-width: 100%;\n}\n\nimg {\n\theight: auto;\n}\n\n\n/* Smaller screens */\n\n@media only screen and (max-width: 1023px) {\n\n\tbody {\n\tfont-size: 0.8em;\n\tline-height: 1.5em;\n\t}\n\t\n\t}\n\n\n/* Mobile */\n\n@media handheld, only screen and (max-width: 767px) {\n\n\tbody {\n\tfont-size: 16px;\n\t-webkit-text-size-adjust: none;\n\t}\n\t\n\t.row, body, .container {\n\twidth: 100%;\n\tmin-width: 0;\n\tmargin-left: 0px;\n\tmargin-right: 0px;\n\tpadding-left: 0px;\n\tpadding-right: 0px;\n\t}\n\t\n\t.row .onecol, .row .twocol, .row .threecol, .row .fourcol, .row .fivecol, .row .sixcol, .row .sevencol, .row .eightcol, .row .ninecol, .row .tencol, .row .elevencol, .row .twelvecol {\n\twidth: auto;\n\tfloat: none;\n\tmargin-left: 0px;\n\tmargin-right: 0px;\n\tpadding-left: 20px;\n\tpadding-right: 20px;\n\t}\n\n}", "public/stylesheets/grid-styles.css": "/* ============================== 
*/\n/* ! Layout for desktop version */\n/* ============================== */\n\n\tbody {\n\t\t\n\t}\n\t\n\n/* ============================= */\n/* ! Layout for mobile version */\n/* ============================= */\n\n@media handheld, only screen and (max-width: 767px) {\n\n\tbody {\n\t\t\n\t}\n\n}\n\n\n/* ========================================== */\n/* ! Provide higher res assets for iPhone 4 */\n/* ========================================== */\n\n@media only screen and (-webkit-min-device-pixel-ratio: 2) { \n\n/*\t.logo {\n\t\tbackground: url(logo2x.jpg) no-repeat;\n\t\tbackground-size: 212px 303px;\n\t}*/\n\n}", "public/stylesheets/ie.css": ".onecol {\nwidth: 4.7%;\n}\n\n.twocol {\nwidth: 13.2%;\n}\n\n.threecol {\nwidth: 22.05%;\n}\n\n.fourcol {\nwidth: 30.6%;\n}\n\n.fivecol {\nwidth: 39%;\n}\n\n.sixcol {\nwidth: 48%;\n}\n\n.sevencol {\nwidth: 56.75%;\n}\n\n.eightcol {\nwidth: 61.6%;\n}\n\n.ninecol {\nwidth: 74.05%;\n}\n\n.tencol {\nwidth: 82%;\n}\n\n.elevencol {\nwidth: 91.35%;\n}", "public/stylesheets/styles.css": ".clear {\n clear: both; }\n\n#wrapper {\n width: 1024px;\n margin: 50px auto; }\n\nhr {\n border-bottom: #e6e5e5;\n opacity: 0.3; }\n\nh1, h2, h3, h4, p, a, li {\n font-family: \"HelveticaNeue-Light\", \"Helvetica Neue Light\", \"Helvetica Neue\", Helvetica, Arial, \"Lucida Grande\", sans-serif;\n font-weight: 100;\n color: #282827; }\n\nh1 {\n font-size: 6em;\n letter-spacing: -6px;\n line-height: 0.95em;\n margin: 10px 0;\n color: black; }\n\nh2 {\n font-size: 4em;\n letter-spacing: -4px;\n line-height: 0.85em; }\n\nh3 {\n font-size: 3em;\n letter-spacing: -3px;\n line-height: 0.85em;\n margin: 10px 0 20px 0; }\n\nh4 {\n font-size: 2em;\n letter-spacing: -0.5px;\n line-height: 0.85em;\n margin: 10px 0 20px 0; }\n\np, a, li {\n font-size: 1.2em; }\n\na {\n color: #fc5401; }\n a:hover {\n color: #282827; }\n\nli {\n line-height: 30px; }\n\n.button.orange {\n border: 1px solid #fc5401;\n -webkit-border-radius: 4px;\n border-radius: 4px;\n float: 
left;\n color: white;\n padding: 10px 20px;\n box-shadow: 0 3px 2px -2px gray;\n margin: 20px 0;\n background: #e37600;\n /* for non-css3 browsers */\n filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#e37600', endColorstr='#fc5401');\n /* for IE */\n background: -webkit-gradient(linear, left top, left bottom, from(#e37600), to(#fc5401));\n /* for webkit browsers */\n background: -moz-linear-gradient(top, #e37600, #fc5401);\n /* for firefox 3.6+ */\n border: 1px solid #fc5401; }\n .button.orange a {\n text-decoration: none; }\n .button.orange p {\n margin: 0;\n font-size: 1em;\n color: white; }\n .button.orange:hover {\n background: #fc5401;\n /* for non-css3 browsers */\n filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fc5401', endColorstr='#e37600');\n /* for IE */\n background: -webkit-gradient(linear, left top, left bottom, from(#fc5401), to(#e37600));\n /* for webkit browsers */\n background: -moz-linear-gradient(top, #fc5401, #e37600);\n /* for firefox 3.6+ */\n border: 1px solid #fc5401; }\n\n.title {\n width: 100%;\n border-bottom: 3px solid black;\n margin: 0 0 40px 0; }\n\n.twocolumn {\n width: 50%;\n float: left;\n margin: 0 0 50px 0; }\n\nform {\n width: 100%;\n padding: 10px;\n background: white;\n /* for non-css3 browsers */\n filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='$white', endColorstr='$lightgrey');\n /* for IE */\n background: -webkit-gradient(linear, left top, left bottom, from(white), to(#e6e5e5));\n /* for webkit browsers */\n background: -moz-linear-gradient(top, white, #e6e5e5);\n /* for firefox 3.6+ */\n border: 1px solid #e6e5e5;\n position: relative;\n margin: 20px 0 50px 0; }\n\nform:before, form:after {\n content: \"\";\n position: absolute;\n z-index: -1;\n bottom: 8px;\n left: 10px;\n width: 60%;\n height: 20%;\n max-width: 300px;\n -webkit-box-shadow: 0 10px 10px rgba(0, 0, 0, 0.5);\n -moz-box-shadow: 0 10px 10px rgba(0, 0, 0, 0.5);\n box-shadow: 0 10px 10px rgba(0, 0, 0, 
0.5);\n -webkit-transform: rotate(-3deg);\n -moz-transform: rotate(-3deg);\n -o-transform: rotate(-3deg);\n transform: rotate(-3deg); }\n\nform:after {\n right: 10px;\n left: auto;\n -webkit-transform: rotate(3deg);\n -moz-transform: rotate(3deg);\n -o-transform: rotate(3deg);\n transform: rotate(3deg); }\n\ninput, textarea, select {\n width: 95%;\n border: 1px solid #e6e5e5;\n height: 35px;\n padding: 5px 10px;\n margin: 0 0 10px 0;\n font-family: \"HelveticaNeue-Light\", \"Helvetica Neue Light\", \"Helvetica Neue\", Helvetica, Arial, \"Lucida Grande\", sans-serif;\n font-size: 18px;\n color: #959494; }\n\nselect {\n width: 99%;\n height: 40px;\n font-size: 1.2em; }\n\ntextarea {\n height: 100px; }\n\nlabel.label_input {\n width: 95%;\n position: absolute;\n top: 1px;\n right: 1px;\n bottom: 1px;\n left: 2px;\n z-index: 1;\n padding: 13px 3px 13px 10px;\n line-height: 20px;\n white-space: nowrap;\n cursor: text;\n display: block;\n font-size: 1.1em;\n font-family: \"HelveticaNeue-Light\", \"Helvetica Neue Light\", \"Helvetica Neue\", Helvetica, Arial, \"Lucida Grande\", sans-serif;\n color: #959494; }\n\nlabel {\n font-size: 1.2em;\n font-family: \"HelveticaNeue-Light\", \"Helvetica Neue Light\", \"Helvetica Neue\", Helvetica, Arial, \"Lucida Grande\", sans-serif;\n color: #959494;\n float: left; }\n\n.input-placeholder {\n position: relative; }\n\n.button.orange.send {\n width: 30%;\n margin: 10px 0 10px 150px;\n line-height: 0;\n letter-spacing: 0; }\n\n.boxui.header, .boxui.box {\n width: 100%;\n -webkit-border-radius: 4px 4px 0 0;\n border-radius: 4px 4px 0 0;\n background: white;\n border: 1px solid #e6e5e5;\n margin: 40px 0 0 0;\n background: white;\n /* for non-css3 browsers */\n filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='$white', endColorstr='$lightgrey');\n /* for IE */\n background: -webkit-gradient(linear, left top, left bottom, from(white), to(#e6e5e5));\n /* for webkit browsers */\n background: -moz-linear-gradient(top, white, 
#e6e5e5);\n /* for firefox 3.6+ */\n padding: 10px; }\n .boxui.header p, .boxui.box p {\n margin: 0; }\n\n.boxui.box {\n -webkit-border-radius: 0 0 4px 4px;\n border-radius: 0 0 4px 4px;\n margin: 0;\n background: white; }\n\nlabel, select {\n font-size: 1.1em; }\n\n.radio {\n width: 5%;\n float: left;\n height: 22px; }\n\n.container p {\n color: #fff;\n line-height: 100px;\n background: #000;\n text-align: center;\n margin: 20px 0 0 0; }\n"}}
-{"repo": "hifi/cnc-ddraw", "pr_number": 5, "title": "Merged improvements by others", "state": "open", "merged_at": null, "additions": 6498, "deletions": 54, "files_changed": ["main.c", "render.c", "render_soft.c"], "files_before": {"main.c": "/*\n * Copyright (c) 2010 Toni Spets \n *\n * Permission to use, copy, modify, and distribute this software for any\n * purpose with or without fee is hereby granted, provided that the above\n * copyright notice and this permission notice appear in all copies.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES\n * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF\n * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR\n * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES\n * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN\n * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF\n * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.\n */\n\n#include \n#include \n#include \n#include \n#include \"ddraw.h\"\n\n#include \"main.h\"\n#include \"palette.h\"\n#include \"surface.h\"\n#include \"clipper.h\"\n\n/* from mouse.c */\nBOOL WINAPI fake_GetCursorPos(LPPOINT lpPoint);\nvoid mouse_init(HWND);\nvoid mouse_lock();\nvoid mouse_unlock();\n\n/* from screenshot.c */\n#ifdef HAVE_LIBPNG\nBOOL screenshot(struct IDirectDrawSurfaceImpl *);\n#endif\n\nIDirectDrawImpl *ddraw = NULL;\n\nDWORD WINAPI render_main(void);\nDWORD WINAPI render_soft_main(void);\nDWORD WINAPI render_dummy_main(void);\n\nHRESULT __stdcall ddraw_Compact(IDirectDrawImpl *This)\n{\n printf(\"DirectDraw::Compact(This=%p)\\n\", This);\n\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_DuplicateSurface(IDirectDrawImpl *This, LPDIRECTDRAWSURFACE src, LPDIRECTDRAWSURFACE *dest)\n{\n printf(\"DirectDraw::DuplicateSurface(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_EnumDisplayModes(IDirectDrawImpl *This, DWORD 
a, LPDDSURFACEDESC b, LPVOID c, LPDDENUMMODESCALLBACK d)\n{\n printf(\"DirectDraw::EnumDisplayModes(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_EnumSurfaces(IDirectDrawImpl *This, DWORD a, LPDDSURFACEDESC b, LPVOID c, LPDDENUMSURFACESCALLBACK d)\n{\n printf(\"DirectDraw::EnumSurfaces(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_FlipToGDISurface(IDirectDrawImpl *This)\n{\n printf(\"DirectDraw::FlipToGDISurface(This=%p)\\n\", This);\n\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_GetCaps(IDirectDrawImpl *This, LPDDCAPS lpDDDriverCaps, LPDDCAPS lpDDEmulCaps)\n{\n printf(\"DirectDraw::GetCaps(This=%p, lpDDDriverCaps=%p, lpDDEmulCaps=%p)\\n\", This, lpDDDriverCaps, lpDDEmulCaps);\n\n if(lpDDDriverCaps)\n {\n lpDDDriverCaps->dwSize = sizeof(DDCAPS);\n lpDDDriverCaps->dwCaps = DDCAPS_BLT|DDCAPS_PALETTE;\n lpDDDriverCaps->dwCKeyCaps = 0;\n lpDDDriverCaps->dwPalCaps = DDPCAPS_8BIT|DDPCAPS_PRIMARYSURFACE;\n lpDDDriverCaps->dwVidMemTotal = 16777216;\n lpDDDriverCaps->dwVidMemFree = 16777216;\n lpDDDriverCaps->dwMaxVisibleOverlays = 0;\n lpDDDriverCaps->dwCurrVisibleOverlays = 0;\n lpDDDriverCaps->dwNumFourCCCodes = 0;\n lpDDDriverCaps->dwAlignBoundarySrc = 0;\n lpDDDriverCaps->dwAlignSizeSrc = 0;\n lpDDDriverCaps->dwAlignBoundaryDest = 0;\n lpDDDriverCaps->dwAlignSizeDest = 0;\n }\n\n if(lpDDEmulCaps)\n {\n lpDDEmulCaps->dwSize = 0;\n }\n\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_GetDisplayMode(IDirectDrawImpl *This, LPDDSURFACEDESC a)\n{\n printf(\"DirectDraw::GetDisplayMode(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_GetFourCCCodes(IDirectDrawImpl *This, LPDWORD a, LPDWORD b)\n{\n printf(\"DirectDraw::GetFourCCCodes(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_GetGDISurface(IDirectDrawImpl *This, LPDIRECTDRAWSURFACE *a)\n{\n printf(\"DirectDraw::GetGDISurface(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall 
ddraw_GetMonitorFrequency(IDirectDrawImpl *This, LPDWORD a)\n{\n printf(\"DirectDraw::GetMonitorFrequency(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_GetScanLine(IDirectDrawImpl *This, LPDWORD a)\n{\n printf(\"DirectDraw::GetScanLine(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_GetVerticalBlankStatus(IDirectDrawImpl *This, LPBOOL a)\n{\n printf(\"DirectDraw::GetVerticalBlankStatus(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_Initialize(IDirectDrawImpl *This, GUID *a)\n{\n printf(\"DirectDraw::Initialize(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_RestoreDisplayMode(IDirectDrawImpl *This)\n{\n printf(\"DirectDraw::RestoreDisplayMode(This=%p)\\n\", This);\n\n if(!This->render.run)\n {\n return DD_OK;\n }\n\n /* only stop drawing in GL mode when minimized */\n if (This->renderer == render_main)\n {\n EnterCriticalSection(&This->cs);\n This->render.run = FALSE;\n ReleaseSemaphore(ddraw->render.sem, 1, NULL);\n LeaveCriticalSection(&This->cs);\n\n WaitForSingleObject(This->render.thread, INFINITE);\n This->render.thread = NULL;\n }\n\n if(!ddraw->windowed)\n {\n ChangeDisplaySettings(&This->mode, 0);\n }\n\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_SetDisplayMode(IDirectDrawImpl *This, DWORD width, DWORD height, DWORD bpp)\n{\n printf(\"DirectDraw::SetDisplayMode(This=%p, width=%d, height=%d, bpp=%d)\\n\", This, (unsigned int)width, (unsigned int)height, (unsigned int)bpp);\n\n This->mode.dmSize = sizeof(DEVMODE);\n This->mode.dmDriverExtra = 0;\n\n if(EnumDisplaySettings(NULL, ENUM_CURRENT_SETTINGS, &This->mode) == FALSE)\n {\n /* not expected */\n return DDERR_UNSUPPORTED;\n }\n\n This->width = width;\n This->height = height;\n This->bpp = bpp;\n This->cursorclip.width = width;\n This->cursorclip.height = height;\n\n ddraw->cursor.x = ddraw->cursorclip.width / 2;\n ddraw->cursor.y = ddraw->cursorclip.height / 2;\n\n if(This->render.width < 
This->width)\n {\n This->render.width = This->width;\n }\n if(This->render.height < This->height)\n {\n This->render.height = This->height;\n }\n\n This->render.run = TRUE;\n\n if (This->renderer == render_dummy_main)\n {\n if(This->render.thread == NULL)\n {\n This->render.thread = CreateThread(NULL, 0, (LPTHREAD_START_ROUTINE)This->renderer, NULL, 0, NULL);\n }\n return DD_OK;\n }\n\n mouse_unlock();\n\n if(This->windowed)\n {\n if(!This->windowed_init)\n {\n if (!This->border)\n {\n SetWindowLong(This->hWnd, GWL_STYLE, GetWindowLong(This->hWnd, GWL_STYLE) & ~(WS_CAPTION | WS_THICKFRAME | WS_MINIMIZE | WS_MAXIMIZE | WS_SYSMENU));\n }\n else\n {\n SetWindowLong(This->hWnd, GWL_STYLE, GetWindowLong(This->hWnd, GWL_STYLE) | WS_CAPTION | WS_BORDER | WS_SYSMENU | WS_MINIMIZEBOX);\n }\n\n /* center the window with correct dimensions */\n int x = (This->mode.dmPelsWidth / 2) - (This->render.width / 2);\n int y = (This->mode.dmPelsHeight / 2) - (This->render.height / 2);\n RECT dst = { x, y, This->render.width+x, This->render.height+y };\n AdjustWindowRect(&dst, GetWindowLong(This->hWnd, GWL_STYLE), FALSE);\n SetWindowPos(This->hWnd, HWND_NOTOPMOST, dst.left, dst.top, (dst.right - dst.left), (dst.bottom - dst.top), SWP_SHOWWINDOW);\n\n This->windowed_init = TRUE;\n }\n }\n else\n {\n SetWindowPos(This->hWnd, HWND_TOPMOST, 0, 0, This->render.width, This->render.height, SWP_SHOWWINDOW);\n\n mouse_lock();\n\n memset(&This->render.mode, 0, sizeof(DEVMODE));\n This->render.mode.dmSize = sizeof(DEVMODE);\n This->render.mode.dmFields = DM_PELSWIDTH|DM_PELSHEIGHT;\n This->render.mode.dmPelsWidth = This->render.width;\n This->render.mode.dmPelsHeight = This->render.height;\n if(This->render.bpp)\n {\n This->render.mode.dmFields |= DM_BITSPERPEL;\n This->render.mode.dmBitsPerPel = This->render.bpp;\n }\n\n if(!This->devmode && ChangeDisplaySettings(&This->render.mode, CDS_FULLSCREEN) != DISP_CHANGE_SUCCESSFUL)\n {\n This->render.run = FALSE;\n return DDERR_INVALIDMODE;\n }\n }\n\n 
if(This->render.thread == NULL)\n {\n This->render.thread = CreateThread(NULL, 0, (LPTHREAD_START_ROUTINE)This->renderer, NULL, 0, NULL);\n }\n\n return DD_OK;\n}\n\n/* minimal window proc for dummy renderer as everything is emulated */\nLRESULT CALLBACK dummy_WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)\n{\n switch(uMsg)\n {\n /* if the plugin window changes */\n case WM_USER:\n ddraw->hWnd = (HWND)lParam;\n ddraw->render.hDC = GetDC(ddraw->hWnd);\n case WM_ACTIVATEAPP:\n if (wParam == TRUE)\n {\n break;\n }\n case WM_SIZE:\n case WM_NCACTIVATE:\n return DefWindowProc(hWnd, uMsg, wParam, lParam);\n case WM_MOUSEMOVE:\n case WM_NCMOUSEMOVE:\n ddraw->cursor.x = GET_X_LPARAM(lParam);\n ddraw->cursor.y = GET_Y_LPARAM(lParam);\n break;\n }\n\n if (ddraw->WndProc)\n {\n return ddraw->WndProc(hWnd, uMsg, wParam, lParam);\n }\n\n return DefWindowProc(hWnd, uMsg, wParam, lParam);\n}\n\nLRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)\n{\n RECT rc = { 0, 0, ddraw->render.width, ddraw->render.height };\n\n switch(uMsg)\n {\n /* Carmageddon stops the main loop when it sees these, DefWindowProc is also bad */\n case WM_WINDOWPOSCHANGING:\n case WM_WINDOWPOSCHANGED:\n return 0;\n\n /* C&C and RA really don't want to close down */\n case WM_SYSCOMMAND:\n if (wParam == SC_CLOSE)\n {\n exit(0);\n }\n return DefWindowProc(hWnd, uMsg, wParam, lParam);\n\n case WM_ACTIVATE:\n if (wParam == WA_ACTIVE || wParam == WA_CLICKACTIVE)\n {\n if (wParam == WA_ACTIVE)\n {\n mouse_lock();\n }\n if (!ddraw->windowed)\n {\n ChangeDisplaySettings(&ddraw->render.mode, CDS_FULLSCREEN);\n }\n }\n else if (wParam == WA_INACTIVE)\n {\n mouse_unlock();\n\n /* minimize our window on defocus when in fullscreen */\n if (!ddraw->windowed)\n {\n ChangeDisplaySettings(&ddraw->mode, 0);\n ShowWindow(ddraw->hWnd, SW_MINIMIZE);\n }\n }\n return 0;\n\n case WM_MOUSELEAVE:\n mouse_unlock();\n return 0;\n\n case WM_ACTIVATEAPP:\n /* C&C and RA stop drawing when they 
receive this with FALSE wParam, disable in windowed mode */\n if (ddraw->windowed)\n {\n return 0;\n }\n break;\n\n case WM_KEYDOWN:\n if(wParam == VK_CONTROL || wParam == VK_TAB)\n {\n if(GetAsyncKeyState(VK_CONTROL) & 0x8000 && GetAsyncKeyState(VK_TAB) & 0x8000)\n {\n mouse_unlock();\n return 0;\n }\n }\n#ifdef HAVE_LIBPNG\n if(wParam == VK_CONTROL || wParam == 0x53 /* S */)\n {\n if(GetAsyncKeyState(VK_CONTROL) & 0x8000 && GetAsyncKeyState(0x53) & 0x8000)\n {\n screenshot(ddraw->primary);\n return 0;\n }\n }\n#endif\n break;\n\n /* button up messages reactivate cursor lock */\n case WM_LBUTTONUP:\n case WM_RBUTTONUP:\n case WM_MBUTTONUP:\n if (ddraw->mhack && !ddraw->locked)\n {\n ddraw->cursor.x = LOWORD(lParam) * ((float)ddraw->width / ddraw->render.width);\n ddraw->cursor.y = HIWORD(lParam) * ((float)ddraw->height / ddraw->render.height);\n mouse_lock();\n return 0;\n }\n /* fall through for lParam */\n\n /* down messages are ignored if we have no cursor lock */\n case WM_LBUTTONDOWN:\n case WM_RBUTTONDOWN:\n case WM_MBUTTONDOWN:\n case WM_MOUSEMOVE:\n if (ddraw->mhack)\n {\n if (!ddraw->locked)\n {\n return 0;\n }\n\n fake_GetCursorPos(NULL); /* update our own cursor */\n lParam = MAKELPARAM(ddraw->cursor.x, ddraw->cursor.y);\n }\n\n if (ddraw->devmode)\n {\n mouse_lock();\n ddraw->cursor.x = GET_X_LPARAM(lParam);\n ddraw->cursor.y = GET_Y_LPARAM(lParam);\n }\n break;\n\n /* make sure we redraw when WM_PAINT is requested */\n case WM_PAINT:\n EnterCriticalSection(&ddraw->cs);\n ReleaseSemaphore(ddraw->render.sem, 1, NULL);\n LeaveCriticalSection(&ddraw->cs);\n break;\n\n case WM_ERASEBKGND:\n EnterCriticalSection(&ddraw->cs);\n FillRect(ddraw->render.hDC, &rc, (HBRUSH) GetStockObject(BLACK_BRUSH));\n ReleaseSemaphore(ddraw->render.sem, 1, NULL);\n LeaveCriticalSection(&ddraw->cs);\n break;\n }\n\n return ddraw->WndProc(hWnd, uMsg, wParam, lParam);\n}\n\nHRESULT __stdcall ddraw_SetCooperativeLevel(IDirectDrawImpl *This, HWND hWnd, DWORD dwFlags)\n{\n 
PIXELFORMATDESCRIPTOR pfd;\n\n printf(\"DirectDraw::SetCooperativeLevel(This=%p, hWnd=0x%08X, dwFlags=0x%08X)\\n\", This, (unsigned int)hWnd, (unsigned int)dwFlags);\n\n /* Red Alert for some weird reason does this on Windows XP */\n if(hWnd == NULL)\n {\n return DDERR_INVALIDPARAMS;\n }\n\n if (This->hWnd == NULL)\n {\n This->hWnd = hWnd;\n }\n\n mouse_init(hWnd);\n\n This->WndProc = (LRESULT CALLBACK (*)(HWND, UINT, WPARAM, LPARAM))GetWindowLong(hWnd, GWL_WNDPROC);\n\n if (This->renderer == render_dummy_main)\n {\n This->render.hDC = GetDC(This->hWnd);\n SetWindowLong(hWnd, GWL_WNDPROC, (LONG)dummy_WndProc);\n ShowWindow(hWnd, SW_HIDE);\n PostMessage(hWnd, WM_ACTIVATEAPP, TRUE, TRUE);\n PostMessage(This->hWnd, WM_USER, 0, (LPARAM)hWnd);\n return DD_OK;\n }\n\n SetWindowLong(This->hWnd, GWL_WNDPROC, (LONG)WndProc);\n\n if(!This->render.hDC)\n {\n This->render.hDC = GetDC(This->hWnd);\n\n memset(&pfd, 0, sizeof(PIXELFORMATDESCRIPTOR));\n pfd.nSize = sizeof(PIXELFORMATDESCRIPTOR);\n pfd.nVersion = 1;\n pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_DOUBLEBUFFER | (This->renderer == render_main ? PFD_SUPPORT_OPENGL : 0);\n pfd.iPixelType = PFD_TYPE_RGBA;\n pfd.cColorBits = ddraw->render.bpp ? 
ddraw->render.bpp : ddraw->mode.dmBitsPerPel;\n pfd.iLayerType = PFD_MAIN_PLANE;\n SetPixelFormat( This->render.hDC, ChoosePixelFormat( This->render.hDC, &pfd ), &pfd );\n }\n\n SetCursor(LoadCursor(NULL, IDC_ARROW));\n\n GetWindowText(This->hWnd, (LPTSTR)&This->title, sizeof(This->title));\n\n if(This->vhack == 1)\n {\n if (strcmp(This->title, \"Command & Conquer\"))\n {\n This->vhack = 0;\n }\n }\n\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_WaitForVerticalBlank(IDirectDrawImpl *This, DWORD a, HANDLE b)\n{\n#if _DEBUG\n printf(\"DirectDraw::WaitForVerticalBlank(This=%p, ...)\\n\", This);\n#endif\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_QueryInterface(IDirectDrawImpl *This, REFIID riid, void **obj)\n{\n printf(\"DirectDraw::QueryInterface(This=%p, riid=%08X, obj=%p)\\n\", This, (unsigned int)riid, obj);\n\n *obj = This;\n\n return S_OK;\n}\n\nULONG __stdcall ddraw_AddRef(IDirectDrawImpl *This)\n{\n printf(\"DirectDraw::AddRef(This=%p)\\n\", This);\n\n This->Ref++;\n\n return This->Ref;\n}\n\nULONG __stdcall ddraw_Release(IDirectDrawImpl *This)\n{\n printf(\"DirectDraw::Release(This=%p)\\n\", This);\n\n This->Ref--;\n\n if(This->Ref == 0)\n {\n if (This->hWnd && This->renderer == render_dummy_main)\n {\n PostMessage(This->hWnd, WM_USER, 0, 0);\n }\n\n if(This->render.run)\n {\n EnterCriticalSection(&This->cs);\n This->render.run = FALSE;\n ReleaseSemaphore(ddraw->render.sem, 1, NULL);\n LeaveCriticalSection(&This->cs);\n\n WaitForSingleObject(This->render.thread, INFINITE);\n This->render.thread = NULL;\n }\n\n if(This->render.hDC)\n {\n ReleaseDC(This->hWnd, This->render.hDC);\n This->render.hDC = NULL;\n }\n\n if(This->render.ev)\n {\n CloseHandle(This->render.ev);\n ddraw->render.ev = NULL;\n }\n\n if(This->real_dll)\n {\n FreeLibrary(This->real_dll);\n }\n\n DeleteCriticalSection(&This->cs);\n\n /* restore old wndproc, subsequent ddraw creation will otherwise fail */\n SetWindowLong(This->hWnd, GWL_WNDPROC, (LONG)This->WndProc);\n 
HeapFree(GetProcessHeap(), 0, This);\n ddraw = NULL;\n return 0;\n }\n\n return This->Ref;\n}\n\nstruct IDirectDrawImplVtbl iface =\n{\n /* IUnknown */\n ddraw_QueryInterface,\n ddraw_AddRef,\n ddraw_Release,\n /* IDirectDrawImpl */\n ddraw_Compact,\n ddraw_CreateClipper,\n ddraw_CreatePalette,\n ddraw_CreateSurface,\n ddraw_DuplicateSurface,\n ddraw_EnumDisplayModes,\n ddraw_EnumSurfaces,\n ddraw_FlipToGDISurface,\n ddraw_GetCaps,\n ddraw_GetDisplayMode,\n ddraw_GetFourCCCodes,\n ddraw_GetGDISurface,\n ddraw_GetMonitorFrequency,\n ddraw_GetScanLine,\n ddraw_GetVerticalBlankStatus,\n ddraw_Initialize,\n ddraw_RestoreDisplayMode,\n ddraw_SetCooperativeLevel,\n ddraw_SetDisplayMode,\n ddraw_WaitForVerticalBlank\n};\n\nint stdout_open = 0;\nHRESULT WINAPI DirectDrawCreate(GUID FAR* lpGUID, LPDIRECTDRAW FAR* lplpDD, IUnknown FAR* pUnkOuter) \n{\n#if _DEBUG\n if(!stdout_open)\n {\n freopen(\"stdout.txt\", \"w\", stdout);\n setvbuf(stdout, NULL, _IONBF, 0);\n stdout_open = 1;\n }\n#endif\n\n printf(\"DirectDrawCreate(lpGUID=%p, lplpDD=%p, pUnkOuter=%p)\\n\", lpGUID, lplpDD, pUnkOuter);\n\n if(ddraw)\n {\n /* FIXME: check the calling module before passing the call! 
*/\n return ddraw->DirectDrawCreate(lpGUID, lplpDD, pUnkOuter);\n\n /*\n printf(\" returning DDERR_DIRECTDRAWALREADYCREATED\\n\");\n return DDERR_DIRECTDRAWALREADYCREATED;\n */\n } \n\n IDirectDrawImpl *This = (IDirectDrawImpl *)HeapAlloc(GetProcessHeap(), HEAP_ZERO_MEMORY, sizeof(IDirectDrawImpl));\n This->lpVtbl = &iface;\n printf(\" This = %p\\n\", This);\n *lplpDD = (LPDIRECTDRAW)This;\n This->Ref = 0;\n ddraw_AddRef(This);\n\n ddraw = This;\n\n This->real_dll = LoadLibrary(\"system32\\\\ddraw.dll\");\n if(!This->real_dll)\n {\n ddraw_Release(This);\n return DDERR_GENERIC;\n }\n\n This->DirectDrawCreate = (HRESULT WINAPI (*)(GUID FAR*, LPDIRECTDRAW FAR*, IUnknown FAR*))GetProcAddress(This->real_dll, \"DirectDrawCreate\");\n\n if(!This->DirectDrawCreate)\n {\n ddraw_Release(This);\n return DDERR_GENERIC;\n }\n\n InitializeCriticalSection(&This->cs);\n This->render.ev = CreateEvent(NULL, TRUE, FALSE, NULL);\n This->render.sem = CreateSemaphore(NULL, 0, 1, NULL);\n\n /* load configuration options from ddraw.ini */\n char cwd[MAX_PATH];\n char ini_path[MAX_PATH];\n char tmp[256];\n GetCurrentDirectoryA(sizeof(cwd), cwd);\n snprintf(ini_path, sizeof(ini_path), \"%s\\\\ddraw.ini\", cwd);\n\n if(GetFileAttributes(ini_path) == 0xFFFFFFFF)\n {\n FILE *fh = fopen(ini_path, \"w\");\n fputs(\n \"[ddraw]\\n\"\n \"; width and height of the window, defaults to the size game requests\\r\\n\"\n \"width=0\\n\"\n \"height=0\\n\"\n \"; bits per pixel, possible values: 16, 24 and 32, 0 = auto\\n\"\n \"bpp=0\\n\"\n \"windowed=true\\n\"\n \"; show window borders in windowed mode\\n\"\n \"border=true\\n\"\n \"; use letter- or windowboxing to make a best fit (GDI only!)\\n\"\n \"boxing=false\\n\"\n \"; real rendering rate, -1 = screen rate, 0 = unlimited, n = cap\\n\"\n \"maxfps=0\\n\"\n \"; vertical synchronization, enable if you get tearing (OpenGL only)\\n\"\n \"vsync=false\\n\"\n \"; scaling filter, nearest = sharp, linear = smooth (OpenGL only)\\n\"\n \"filter=nearest\\n\"\n \"; 
automatic mouse sensitivity scaling\\n\"\n \"adjmouse=false\\n\"\n \"; manual sensitivity scaling, 0 = disabled, 0.5 = half, 1.0 = normal\\n\"\n \"sensitivity=0.0\\n\"\n \"; enable C&C/RA mouse hack\\n\"\n \"mhack=true\\n\"\n \"; enable C&C video resize hack, auto = auto-detect game, true = forced, false = disabled (OpenGL only)\\n\"\n \"vhack=false\\n\"\n \"; switch between OpenGL (opengl) and software (gdi) renderers, latter supports less features but might be faster depending on the GPU\\n\"\n \"renderer=gdi\\n\"\n \"; force CPU0 affinity, avoids crashes with RA, *might* have a performance impact\\n\"\n \"singlecpu=true\\n\"\n , fh);\n fclose(fh);\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"windowed\", \"TRUE\", tmp, sizeof(tmp), ini_path);\n if (tolower(tmp[0]) == 'n' || tolower(tmp[0]) == 'f' || tolower(tmp[0]) == 'd' || tmp[0] == '0')\n {\n This->windowed = FALSE;\n }\n else\n {\n This->windowed = TRUE;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"border\", \"TRUE\", tmp, sizeof(tmp), ini_path);\n if (tolower(tmp[0]) == 'n' || tolower(tmp[0]) == 'f' || tolower(tmp[0]) == 'd' || tmp[0] == '0')\n {\n This->border = FALSE;\n }\n else\n {\n This->border = TRUE;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"boxing\", \"FALSE\", tmp, sizeof(tmp), ini_path);\n if (tolower(tmp[0]) == 'n' || tolower(tmp[0]) == 'f' || tolower(tmp[0]) == 'd' || tmp[0] == '0')\n {\n This->boxing = FALSE;\n }\n else\n {\n This->boxing = TRUE;\n }\n\n This->render.maxfps = GetPrivateProfileIntA(\"ddraw\", \"maxfps\", 0, ini_path);\n This->render.width = GetPrivateProfileIntA(\"ddraw\", \"width\", 0, ini_path);\n This->render.height = GetPrivateProfileIntA(\"ddraw\", \"height\", 0, ini_path);\n\n This->render.bpp = GetPrivateProfileIntA(\"ddraw\", \"bpp\", 32, ini_path);\n if (This->render.bpp != 16 && This->render.bpp != 24 && This->render.bpp != 32)\n {\n This->render.bpp = 0;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"filter\", tmp, tmp, sizeof(tmp), ini_path);\n if 
(tolower(tmp[0]) == 'l' || tolower(tmp[3]) == 'l')\n {\n This->render.filter = 1;\n }\n else\n {\n This->render.filter = 0;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"adjmouse\", \"FALSE\", tmp, sizeof(tmp), ini_path);\n if (tolower(tmp[0]) == 'y' || tolower(tmp[0]) == 't' || tolower(tmp[0]) == 'e' || tmp[0] == '1')\n {\n This->adjmouse = TRUE;\n }\n else\n {\n This->adjmouse = FALSE;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"mhack\", \"TRUE\", tmp, sizeof(tmp), ini_path);\n if (tolower(tmp[0]) == 'y' || tolower(tmp[0]) == 't' || tolower(tmp[0]) == 'e' || tmp[0] == '1')\n {\n This->mhack = TRUE;\n }\n else\n {\n This->mhack = FALSE;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"devmode\", \"FALSE\", tmp, sizeof(tmp), ini_path);\n if (tolower(tmp[0]) == 'y' || tolower(tmp[0]) == 't' || tolower(tmp[0]) == 'e' || tmp[0] == '1')\n {\n This->devmode = TRUE;\n This->mhack = FALSE;\n }\n else\n {\n This->devmode = FALSE;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"vsync\", \"FALSE\", tmp, sizeof(tmp), ini_path);\n if (tolower(tmp[0]) == 'y' || tolower(tmp[0]) == 't' || tolower(tmp[0]) == 'e' || tmp[0] == '1')\n {\n This->vsync = TRUE;\n }\n else\n {\n This->vsync = FALSE;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"sensitivity\", \"0\", tmp, sizeof(tmp), ini_path);\n This->sensitivity = strtof(tmp, NULL);\n\n GetPrivateProfileStringA(\"ddraw\", \"vhack\", \"false\", tmp, sizeof(tmp), ini_path);\n if (tolower(tmp[0]) == 'y' || tolower(tmp[0]) == 't' || tolower(tmp[0]) == 'e' || tmp[0] == '1')\n {\n This->vhack = 2;\n }\n else if(tolower(tmp[0]) == 'a')\n {\n This->vhack = 1;\n }\n else\n {\n This->vhack = 0;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"renderer\", \"gdi\", tmp, sizeof(tmp), ini_path);\n if(tolower(tmp[0]) == 'd' || tolower(tmp[0]) == 'd')\n {\n printf(\"DirectDrawCreate: Using dummy renderer\\n\");\n This->renderer = render_dummy_main;\n }\n else if(tolower(tmp[0]) == 's' || tolower(tmp[0]) == 'g')\n {\n printf(\"DirectDrawCreate: Using 
software renderer\\n\");\n This->renderer = render_soft_main;\n }\n else\n {\n printf(\"DirectDrawCreate: Using OpenGL renderer\\n\");\n This->renderer = render_main;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"singlecpu\", \"true\", tmp, sizeof(tmp), ini_path);\n if (tolower(tmp[0]) == 'y' || tolower(tmp[0]) == 't' || tolower(tmp[0]) == 'e' || tmp[0] == '1')\n {\n printf(\"DirectDrawCreate: Setting CPU0 affinity\\n\");\n SetProcessAffinityMask(GetCurrentProcess(), 1);\n }\n\n /* last minute check for cnc-plugin */\n if (GetEnvironmentVariable(\"DDRAW_WINDOW\", tmp, sizeof(tmp)) > 0)\n {\n This->hWnd = (HWND)atoi(tmp);\n This->renderer = render_dummy_main;\n This->windowed = TRUE;\n\n if (GetEnvironmentVariable(\"DDRAW_WIDTH\", tmp, sizeof(tmp)) > 0)\n {\n This->render.width = atoi(tmp);\n }\n\n if (GetEnvironmentVariable(\"DDRAW_HEIGHT\", tmp, sizeof(tmp)) > 0)\n {\n This->render.height = atoi(tmp);\n }\n\n printf(\"DirectDrawCreate: Detected cnc-plugin at window %08X in %dx%d\\n\", (unsigned int)This->hWnd, This->render.width, This->render.height);\n }\n\n return DD_OK;\n}\n", "render.c": "/*\n * Copyright (c) 2010 Toni Spets \n *\n * Permission to use, copy, modify, and distribute this software for any\n * purpose with or without fee is hereby granted, provided that the above\n * copyright notice and this permission notice appear in all copies.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES\n * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF\n * MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR\n * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES\n * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN\n * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF\n * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.\n */\n\n#include \n#include \n\n#include \"main.h\"\n#include \"surface.h\"\n\n#define CUTSCENE_WIDTH 640\n#define CUTSCENE_HEIGHT 400\n\nBOOL detect_cutscene();\n\nDWORD WINAPI render_main(void)\n{\n int i,j;\n HGLRC hRC;\n\n int tex_width = ddraw->width > 1024 ? ddraw->width : 1024;\n int tex_height = ddraw->height > 1024 ? ddraw->height : 1024;\n float scale_w = 1.0f;\n float scale_h = 1.0f;\n int *tex = HeapAlloc(GetProcessHeap(), HEAP_ZERO_MEMORY, tex_width * tex_height * sizeof(int));\n\n hRC = wglCreateContext( ddraw->render.hDC );\n wglMakeCurrent( ddraw->render.hDC, hRC );\n\n char *glext = (char *)glGetString(GL_EXTENSIONS);\n\n if(glext && strstr(glext, \"WGL_EXT_swap_control\"))\n {\n BOOL (APIENTRY *wglSwapIntervalEXT)(int) = (BOOL (APIENTRY *)(int))wglGetProcAddress(\"wglSwapIntervalEXT\");\n if(wglSwapIntervalEXT)\n {\n if(ddraw->vsync)\n {\n wglSwapIntervalEXT(1);\n }\n else\n {\n wglSwapIntervalEXT(0);\n }\n }\n }\n\n DWORD tick_start = 0;\n DWORD tick_end = 0;\n DWORD frame_len = 0;\n\n if(ddraw->render.maxfps < 0)\n {\n ddraw->render.maxfps = ddraw->mode.dmDisplayFrequency;\n }\n\n if(ddraw->render.maxfps > 0)\n {\n frame_len = 1000.0f / ddraw->render.maxfps;\n }\n\n glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, tex_width, tex_height, 0, GL_RGBA, GL_UNSIGNED_BYTE, tex);\n glViewport(0, 0, ddraw->render.width, ddraw->render.height);\n\n if(ddraw->render.filter)\n {\n glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);\n glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);\n }\n else\n {\n glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_NEAREST);\n 
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_NEAREST);\n }\n\n glEnable(GL_TEXTURE_2D);\n\n while(ddraw->render.run && WaitForSingleObject(ddraw->render.sem, INFINITE) != WAIT_FAILED)\n {\n scale_w = (float)ddraw->width/tex_width;\n scale_h = (float)ddraw->height/tex_height;\n\n if(ddraw->render.maxfps > 0)\n {\n tick_start = GetTickCount();\n }\n\n /* convert ddraw surface to opengl texture */\n EnterCriticalSection(&ddraw->cs);\n\n if(ddraw->primary && ddraw->primary->palette)\n {\n if(ddraw->vhack && detect_cutscene())\n {\n scale_w *= (float)CUTSCENE_WIDTH / ddraw->width;\n scale_h *= (float)CUTSCENE_HEIGHT / ddraw->height;\n\n if (ddraw->cursorclip.width != CUTSCENE_WIDTH || ddraw->cursorclip.height != CUTSCENE_HEIGHT)\n {\n ddraw->cursorclip.width = CUTSCENE_WIDTH;\n ddraw->cursorclip.height = CUTSCENE_HEIGHT;\n ddraw->cursor.x = CUTSCENE_WIDTH / 2;\n ddraw->cursor.y = CUTSCENE_HEIGHT / 2;\n }\n }\n else\n {\n if (ddraw->cursorclip.width != ddraw->width || ddraw->cursorclip.height != ddraw->height)\n {\n ddraw->cursorclip.width = ddraw->width;\n ddraw->cursorclip.height = ddraw->height;\n ddraw->cursor.x = ddraw->width / 2;\n ddraw->cursor.y = ddraw->height / 2;\n }\n }\n\n for(i=0; iheight; i++)\n {\n for(j=0; jwidth; j++)\n {\n tex[i*ddraw->width+j] = ddraw->primary->palette->data_bgr[((unsigned char *)ddraw->primary->surface)[i*ddraw->primary->lPitch + j*ddraw->primary->lXPitch]];\n }\n }\n }\n LeaveCriticalSection(&ddraw->cs);\n\n glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, ddraw->width, ddraw->height, GL_RGBA, GL_UNSIGNED_BYTE, tex);\n\n glBegin(GL_TRIANGLE_FAN);\n glTexCoord2f(0,0); glVertex2f(-1, 1);\n glTexCoord2f(scale_w,0); glVertex2f( 1, 1);\n glTexCoord2f(scale_w,scale_h); glVertex2f( 1, -1);\t\n glTexCoord2f(0,scale_h); glVertex2f(-1, -1);\n glEnd();\n\n SwapBuffers(ddraw->render.hDC);\n\n if(ddraw->render.maxfps > 0)\n {\n tick_end = GetTickCount();\n\n if(tick_end - tick_start < frame_len)\n {\n Sleep( frame_len - (tick_end - tick_start) 
);\n }\n }\n\n SetEvent(ddraw->render.ev);\n }\n\n HeapFree(GetProcessHeap(), 0, tex);\n\n wglMakeCurrent(NULL, NULL);\n wglDeleteContext(hRC);\n\n return 0;\n}\n\nstatic unsigned char getPixel(int x, int y)\n{\n return ((unsigned char *)ddraw->primary->surface)[y*ddraw->primary->lPitch + x*ddraw->primary->lXPitch];\n}\n\nBOOL detect_cutscene()\n{\n if(ddraw->width <= CUTSCENE_WIDTH || ddraw->height <= CUTSCENE_HEIGHT)\n return FALSE;\n\n return getPixel(CUTSCENE_WIDTH + 1, 0) == 0 || getPixel(CUTSCENE_WIDTH + 5, 1) == 0 ? TRUE : FALSE;\n}\n", "render_soft.c": "/*\n * Copyright (c) 2011 Toni Spets \n *\n * Permission to use, copy, modify, and distribute this software for any\n * purpose with or without fee is hereby granted, provided that the above\n * copyright notice and this permission notice appear in all copies.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES\n * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF\n * MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR\n * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES\n * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN\n * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF\n * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.\n */\n\n#include \n#include \n\n#include \"main.h\"\n#include \"surface.h\"\n\nDWORD WINAPI render_soft_main(void)\n{\n PBITMAPINFO bmi = HeapAlloc(GetProcessHeap(), HEAP_ZERO_MEMORY, sizeof(BITMAPINFOHEADER) + sizeof(RGBQUAD) * 256);\n\n bmi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);\n bmi->bmiHeader.biWidth = ddraw->width;\n bmi->bmiHeader.biHeight = -ddraw->height;\n bmi->bmiHeader.biPlanes = 1;\n bmi->bmiHeader.biBitCount = ddraw->bpp;\n bmi->bmiHeader.biCompression = BI_RGB;\n\n DWORD dst_top = 0;\n DWORD dst_left = 0;\n DWORD dst_width = ddraw->render.width;\n DWORD dst_height = ddraw->render.height;\n\n DWORD tick_start = 0;\n DWORD tick_end = 0;\n DWORD frame_len = 0;\n\n if (ddraw->boxing)\n {\n dst_width = ddraw->width;\n dst_height = ddraw->height;\n\n /* test if we can double scale the window */\n if (ddraw->width * 2 <= ddraw->render.width && ddraw->height * 2 <= ddraw->render.height)\n {\n dst_width *= 2;\n dst_height *= 2;\n }\n\n dst_top = ddraw->render.height / 2 - dst_height / 2;\n dst_left = ddraw->render.width / 2 - dst_width / 2;\n }\n\n if(ddraw->render.maxfps < 0)\n {\n ddraw->render.maxfps = ddraw->mode.dmDisplayFrequency;\n }\n\n if(ddraw->render.maxfps > 0)\n {\n frame_len = 1000.0f / ddraw->render.maxfps;\n }\n\n while (ddraw->render.run && WaitForSingleObject(ddraw->render.sem, INFINITE) != WAIT_FAILED)\n {\n if(ddraw->render.maxfps > 0)\n {\n tick_start = GetTickCount();\n }\n\n EnterCriticalSection(&ddraw->cs);\n\n if (ddraw->primary && (ddraw->primary->palette || ddraw->bpp == 16))\n {\n if (ddraw->primary->palette && ddraw->primary->palette->data_rgb == NULL)\n {\n ddraw->primary->palette->data_rgb = 
&bmi->bmiColors[0];\n }\n\n if (ddraw->render.width != ddraw->width || ddraw->render.height != ddraw->height)\n {\n StretchDIBits(ddraw->render.hDC, dst_left, dst_top, dst_width, dst_height, 0, 0, ddraw->width, ddraw->height, ddraw->primary->surface, bmi, DIB_RGB_COLORS, SRCCOPY);\n }\n else\n {\n SetDIBitsToDevice(ddraw->render.hDC, 0, 0, ddraw->width, ddraw->height, 0, 0, 0, ddraw->height, ddraw->primary->surface, bmi, DIB_RGB_COLORS);\n }\n }\n LeaveCriticalSection(&ddraw->cs);\n\n if(ddraw->render.maxfps > 0)\n {\n tick_end = GetTickCount();\n\n if(tick_end - tick_start < frame_len)\n {\n Sleep( frame_len - (tick_end - tick_start) );\n }\n }\n\n SetEvent(ddraw->render.ev);\n }\n\n HeapFree(GetProcessHeap(), 0, bmi);\n\n return TRUE;\n}\n"}, "files_after": {"main.c": "/*\n * Copyright (c) 2010 Toni Spets \n *\n * Permission to use, copy, modify, and distribute this software for any\n * purpose with or without fee is hereby granted, provided that the above\n * copyright notice and this permission notice appear in all copies.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES\n * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF\n * MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR\n * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES\n * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN\n * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF\n * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.\n */\n\n#include \n#include \n#include \n#include \n#include \"ddraw.h\"\n\n#include \"main.h\"\n#include \"palette.h\"\n#include \"surface.h\"\n#include \"clipper.h\"\n\n#define IDR_MYMENU 93\n\n/* from mouse.c */\nBOOL WINAPI fake_GetCursorPos(LPPOINT lpPoint);\nvoid mouse_init(HWND);\nvoid mouse_lock();\nvoid mouse_unlock();\n\n/* from screenshot.c */\n#ifdef HAVE_LIBPNG\nBOOL screenshot(struct IDirectDrawSurfaceImpl *);\n#endif\n\nIDirectDrawImpl *ddraw = NULL;\n\nDWORD WINAPI render_main(void);\nDWORD WINAPI render_soft_main(void);\nDWORD WINAPI render_dummy_main(void);\n\nHRESULT __stdcall ddraw_Compact(IDirectDrawImpl *This)\n{\n printf(\"DirectDraw::Compact(This=%p)\\n\", This);\n\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_DuplicateSurface(IDirectDrawImpl *This, LPDIRECTDRAWSURFACE src, LPDIRECTDRAWSURFACE *dest)\n{\n printf(\"DirectDraw::DuplicateSurface(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_EnumDisplayModes(IDirectDrawImpl *This, DWORD a, LPDDSURFACEDESC b, LPVOID c, LPDDENUMMODESCALLBACK d)\n{\n printf(\"DirectDraw::EnumDisplayModes(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_EnumSurfaces(IDirectDrawImpl *This, DWORD a, LPDDSURFACEDESC b, LPVOID c, LPDDENUMSURFACESCALLBACK d)\n{\n printf(\"DirectDraw::EnumSurfaces(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_FlipToGDISurface(IDirectDrawImpl *This)\n{\n printf(\"DirectDraw::FlipToGDISurface(This=%p)\\n\", This);\n\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_GetCaps(IDirectDrawImpl *This, LPDDCAPS lpDDDriverCaps, LPDDCAPS lpDDEmulCaps)\n{\n printf(\"DirectDraw::GetCaps(This=%p, 
lpDDDriverCaps=%p, lpDDEmulCaps=%p)\\n\", This, lpDDDriverCaps, lpDDEmulCaps);\n\n if(lpDDDriverCaps)\n {\n lpDDDriverCaps->dwSize = sizeof(DDCAPS);\n lpDDDriverCaps->dwCaps = DDCAPS_BLT|DDCAPS_PALETTE;\n lpDDDriverCaps->dwCKeyCaps = 0;\n lpDDDriverCaps->dwPalCaps = DDPCAPS_8BIT|DDPCAPS_PRIMARYSURFACE;\n lpDDDriverCaps->dwVidMemTotal = 16777216;\n lpDDDriverCaps->dwVidMemFree = 16777216;\n lpDDDriverCaps->dwMaxVisibleOverlays = 0;\n lpDDDriverCaps->dwCurrVisibleOverlays = 0;\n lpDDDriverCaps->dwNumFourCCCodes = 0;\n lpDDDriverCaps->dwAlignBoundarySrc = 0;\n lpDDDriverCaps->dwAlignSizeSrc = 0;\n lpDDDriverCaps->dwAlignBoundaryDest = 0;\n lpDDDriverCaps->dwAlignSizeDest = 0;\n }\n\n if(lpDDEmulCaps)\n {\n lpDDEmulCaps->dwSize = 0;\n }\n\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_GetDisplayMode(IDirectDrawImpl *This, LPDDSURFACEDESC a)\n{\n printf(\"DirectDraw::GetDisplayMode(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_GetFourCCCodes(IDirectDrawImpl *This, LPDWORD a, LPDWORD b)\n{\n printf(\"DirectDraw::GetFourCCCodes(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_GetGDISurface(IDirectDrawImpl *This, LPDIRECTDRAWSURFACE *a)\n{\n printf(\"DirectDraw::GetGDISurface(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_GetMonitorFrequency(IDirectDrawImpl *This, LPDWORD a)\n{\n printf(\"DirectDraw::GetMonitorFrequency(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_GetScanLine(IDirectDrawImpl *This, LPDWORD a)\n{\n printf(\"DirectDraw::GetScanLine(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_GetVerticalBlankStatus(IDirectDrawImpl *This, LPBOOL a)\n{\n printf(\"DirectDraw::GetVerticalBlankStatus(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_Initialize(IDirectDrawImpl *This, GUID *a)\n{\n printf(\"DirectDraw::Initialize(This=%p, ...)\\n\", This);\n return DD_OK;\n}\n\nHRESULT __stdcall 
ddraw_RestoreDisplayMode(IDirectDrawImpl *This)\n{\n printf(\"DirectDraw::RestoreDisplayMode(This=%p)\\n\", This);\n\n if(!This->render.run)\n {\n return DD_OK;\n }\n\n /* only stop drawing in GL mode when minimized */\n if (This->renderer == render_main)\n {\n EnterCriticalSection(&This->cs);\n This->render.run = FALSE;\n ReleaseSemaphore(ddraw->render.sem, 1, NULL);\n LeaveCriticalSection(&This->cs);\n\n WaitForSingleObject(This->render.thread, INFINITE);\n This->render.thread = NULL;\n }\n\n if(!ddraw->windowed)\n {\n ChangeDisplaySettings(&This->mode, 0);\n }\n\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_SetDisplayMode(IDirectDrawImpl *This, DWORD width, DWORD height, DWORD bpp)\n{\n printf(\"DirectDraw::SetDisplayMode(This=%p, width=%d, height=%d, bpp=%d)\\n\", This, (unsigned int)width, (unsigned int)height, (unsigned int)bpp);\n\n This->mode.dmSize = sizeof(DEVMODE);\n This->mode.dmDriverExtra = 0;\n\n if(EnumDisplaySettings(NULL, ENUM_CURRENT_SETTINGS, &This->mode) == FALSE)\n {\n /* not expected */\n return DDERR_UNSUPPORTED;\n }\n\n This->width = width;\n This->height = height;\n This->bpp = bpp;\n This->cursorclip.width = width;\n This->cursorclip.height = height;\n\n ddraw->cursor.x = ddraw->cursorclip.width / 2;\n ddraw->cursor.y = ddraw->cursorclip.height / 2;\n\n if(This->render.width < This->width)\n {\n This->render.width = This->width;\n }\n if(This->render.height < This->height)\n {\n This->render.height = This->height;\n }\n\n This->render.run = TRUE;\n\n if (This->renderer == render_dummy_main)\n {\n if(This->render.thread == NULL)\n {\n This->render.thread = CreateThread(NULL, 0, (LPTHREAD_START_ROUTINE)This->renderer, NULL, 0, NULL);\n }\n return DD_OK;\n }\n\n mouse_unlock();\n\t\n\tconst HANDLE hbicon = LoadImage(GetModuleHandle(0), MAKEINTRESOURCE(IDR_MYMENU), IMAGE_ICON, GetSystemMetrics(SM_CXICON), GetSystemMetrics(SM_CYICON), 0);\n\tif (hbicon)\n\t\tSendMessage(This->hWnd, WM_SETICON, ICON_BIG, (LPARAM)hbicon);\n\n\tconst HANDLE 
hsicon = LoadImage(GetModuleHandle(0), MAKEINTRESOURCE(IDR_MYMENU), IMAGE_ICON, GetSystemMetrics(SM_CXSMICON), GetSystemMetrics(SM_CYSMICON), 0);\n\tif (hsicon)\n\t\tSendMessage(This->hWnd, WM_SETICON, ICON_SMALL, (LPARAM)hsicon);\n\n if(This->windowed)\n {\n if(!This->windowed_init)\n {\n if (!This->border)\n {\n SetWindowLong(This->hWnd, GWL_STYLE, GetWindowLong(This->hWnd, GWL_STYLE) & ~(WS_CAPTION | WS_THICKFRAME | WS_MINIMIZE | WS_MAXIMIZE | WS_SYSMENU));\n }\n else\n {\n SetWindowLong(This->hWnd, GWL_STYLE, GetWindowLong(This->hWnd, GWL_STYLE) | WS_CAPTION | WS_BORDER | WS_SYSMENU | WS_MINIMIZEBOX);\n }\n\n /* center the window with correct dimensions */\n int x = (This->mode.dmPelsWidth / 2) - (This->render.width / 2);\n int y = (This->mode.dmPelsHeight / 2) - (This->render.height / 2);\n RECT dst = { x, y, This->render.width+x, This->render.height+y };\n AdjustWindowRect(&dst, GetWindowLong(This->hWnd, GWL_STYLE), FALSE);\n SetWindowPos(This->hWnd, HWND_NOTOPMOST, dst.left, dst.top, (dst.right - dst.left), (dst.bottom - dst.top), SWP_SHOWWINDOW);\n\n This->windowed_init = TRUE;\n }\n }\n else\n {\n SetWindowPos(This->hWnd, HWND_TOPMOST, 0, 0, This->render.width, This->render.height, SWP_SHOWWINDOW);\n\n mouse_lock();\n\n memset(&This->render.mode, 0, sizeof(DEVMODE));\n This->render.mode.dmSize = sizeof(DEVMODE);\n This->render.mode.dmFields = DM_PELSWIDTH|DM_PELSHEIGHT;\n This->render.mode.dmPelsWidth = This->render.width;\n This->render.mode.dmPelsHeight = This->render.height;\n if(This->render.bpp)\n {\n This->render.mode.dmFields |= DM_BITSPERPEL;\n This->render.mode.dmBitsPerPel = This->render.bpp;\n }\n\n if(!This->devmode && ChangeDisplaySettings(&This->render.mode, CDS_FULLSCREEN) != DISP_CHANGE_SUCCESSFUL)\n {\n This->render.run = FALSE;\n return DDERR_INVALIDMODE;\n }\n }\n\n if(This->render.thread == NULL)\n {\n This->render.thread = CreateThread(NULL, 0, (LPTHREAD_START_ROUTINE)This->renderer, NULL, 0, NULL);\n }\n\n return DD_OK;\n}\n\n/* 
minimal window proc for dummy renderer as everything is emulated */\nLRESULT CALLBACK dummy_WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)\n{\n switch(uMsg)\n {\n /* if the plugin window changes */\n case WM_USER:\n ddraw->hWnd = (HWND)lParam;\n ddraw->render.hDC = GetDC(ddraw->hWnd);\n case WM_ACTIVATEAPP:\n if (wParam == TRUE)\n {\n break;\n }\n case WM_SIZE:\n case WM_NCACTIVATE:\n return DefWindowProc(hWnd, uMsg, wParam, lParam);\n case WM_MOUSEMOVE:\n case WM_NCMOUSEMOVE:\n ddraw->cursor.x = GET_X_LPARAM(lParam);\n ddraw->cursor.y = GET_Y_LPARAM(lParam);\n break;\n }\n\n if (ddraw->WndProc)\n {\n return ddraw->WndProc(hWnd, uMsg, wParam, lParam);\n }\n\n return DefWindowProc(hWnd, uMsg, wParam, lParam);\n}\n\nLRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)\n{\n RECT rc = { 0, 0, ddraw->render.width, ddraw->render.height };\n\n switch(uMsg)\n {\n /* Carmageddon stops the main loop when it sees these, DefWindowProc is also bad */\n case WM_WINDOWPOSCHANGING:\n case WM_WINDOWPOSCHANGED:\n return 0;\n\n /* C&C and RA really don't want to close down */\n case WM_SYSCOMMAND:\n if (wParam == SC_CLOSE)\n {\n exit(0);\n }\n return DefWindowProc(hWnd, uMsg, wParam, lParam);\n\n case WM_ACTIVATE:\n if (wParam == WA_ACTIVE || wParam == WA_CLICKACTIVE)\n {\n if (wParam == WA_ACTIVE)\n {\n mouse_lock();\n }\n if (!ddraw->windowed)\n {\n ChangeDisplaySettings(&ddraw->render.mode, CDS_FULLSCREEN);\n }\n }\n else if (wParam == WA_INACTIVE)\n {\n mouse_unlock();\n\n /* minimize our window on defocus when in fullscreen */\n if (!ddraw->windowed)\n {\n ChangeDisplaySettings(&ddraw->mode, 0);\n ShowWindow(ddraw->hWnd, SW_MINIMIZE);\n }\n }\n return 0;\n\n case WM_MOUSELEAVE:\n mouse_unlock();\n return 0;\n\n case WM_ACTIVATEAPP:\n /* C&C and RA stop drawing when they receive this with FALSE wParam, disable in windowed mode */\n if (ddraw->windowed)\n {\n return 0;\n }\n break;\n\n case WM_KEYDOWN:\n if(wParam == VK_CONTROL || wParam == 
VK_TAB)\n {\n if(GetAsyncKeyState(VK_CONTROL) & 0x8000 && GetAsyncKeyState(VK_TAB) & 0x8000)\n {\n mouse_unlock();\n return 0;\n }\n }\n#ifdef HAVE_LIBPNG\n if(wParam == VK_CONTROL || wParam == 0x53 /* S */)\n {\n if(GetAsyncKeyState(VK_CONTROL) & 0x8000 && GetAsyncKeyState(0x53) & 0x8000)\n {\n screenshot(ddraw->primary);\n return 0;\n }\n }\n#endif\n break;\n\n /* button up messages reactivate cursor lock */\n case WM_LBUTTONUP:\n case WM_RBUTTONUP:\n case WM_MBUTTONUP:\n if (ddraw->mhack && !ddraw->locked)\n {\n ddraw->cursor.x = LOWORD(lParam) * ((float)ddraw->width / ddraw->render.width);\n ddraw->cursor.y = HIWORD(lParam) * ((float)ddraw->height / ddraw->render.height);\n mouse_lock();\n return 0;\n }\n /* fall through for lParam */\n\n /* down messages are ignored if we have no cursor lock */\n case WM_LBUTTONDOWN:\n case WM_RBUTTONDOWN:\n case WM_MBUTTONDOWN:\n case WM_MOUSEMOVE:\n if (ddraw->mhack)\n {\n if (!ddraw->locked)\n {\n return 0;\n }\n\n fake_GetCursorPos(NULL); /* update our own cursor */\n lParam = MAKELPARAM(ddraw->cursor.x, ddraw->cursor.y);\n }\n\n if (ddraw->devmode)\n {\n mouse_lock();\n ddraw->cursor.x = GET_X_LPARAM(lParam);\n ddraw->cursor.y = GET_Y_LPARAM(lParam);\n }\n break;\n\n /* make sure we redraw when WM_PAINT is requested */\n case WM_PAINT:\n EnterCriticalSection(&ddraw->cs);\n ReleaseSemaphore(ddraw->render.sem, 1, NULL);\n LeaveCriticalSection(&ddraw->cs);\n break;\n\n case WM_ERASEBKGND:\n EnterCriticalSection(&ddraw->cs);\n FillRect(ddraw->render.hDC, &rc, (HBRUSH) GetStockObject(BLACK_BRUSH));\n ReleaseSemaphore(ddraw->render.sem, 1, NULL);\n LeaveCriticalSection(&ddraw->cs);\n break;\n }\n\n return ddraw->WndProc(hWnd, uMsg, wParam, lParam);\n}\n\nHRESULT __stdcall ddraw_SetCooperativeLevel(IDirectDrawImpl *This, HWND hWnd, DWORD dwFlags)\n{\n PIXELFORMATDESCRIPTOR pfd;\n\n printf(\"DirectDraw::SetCooperativeLevel(This=%p, hWnd=0x%08X, dwFlags=0x%08X)\\n\", This, (unsigned int)hWnd, (unsigned int)dwFlags);\n\n /* Red 
Alert for some weird reason does this on Windows XP */\n if(hWnd == NULL)\n {\n return DDERR_INVALIDPARAMS;\n }\n\n if (This->hWnd == NULL)\n {\n This->hWnd = hWnd;\n }\n\n mouse_init(hWnd);\n\n This->WndProc = (LRESULT CALLBACK (*)(HWND, UINT, WPARAM, LPARAM))GetWindowLong(hWnd, GWL_WNDPROC);\n\n if (This->renderer == render_dummy_main)\n {\n This->render.hDC = GetDC(This->hWnd);\n SetWindowLong(hWnd, GWL_WNDPROC, (LONG)dummy_WndProc);\n ShowWindow(hWnd, SW_HIDE);\n PostMessage(hWnd, WM_ACTIVATEAPP, TRUE, TRUE);\n PostMessage(This->hWnd, WM_USER, 0, (LPARAM)hWnd);\n return DD_OK;\n }\n\n SetWindowLong(This->hWnd, GWL_WNDPROC, (LONG)WndProc);\n\n if(!This->render.hDC)\n {\n This->render.hDC = GetDC(This->hWnd);\n\n memset(&pfd, 0, sizeof(PIXELFORMATDESCRIPTOR));\n pfd.nSize = sizeof(PIXELFORMATDESCRIPTOR);\n pfd.nVersion = 1;\n pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_DOUBLEBUFFER | (This->renderer == render_main ? PFD_SUPPORT_OPENGL : 0);\n pfd.iPixelType = PFD_TYPE_RGBA;\n pfd.cColorBits = ddraw->render.bpp ? 
ddraw->render.bpp : ddraw->mode.dmBitsPerPel;\n pfd.iLayerType = PFD_MAIN_PLANE;\n SetPixelFormat( This->render.hDC, ChoosePixelFormat( This->render.hDC, &pfd ), &pfd );\n }\n\n SetCursor(LoadCursor(NULL, IDC_ARROW));\n\n GetWindowText(This->hWnd, (LPTSTR)&This->title, sizeof(This->title));\n\n\tif (!strcmp(This->title, \"Red Alert\"))\n\t{\n\t\tddraw->isredalert = 1;\n\t}\n\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_WaitForVerticalBlank(IDirectDrawImpl *This, DWORD a, HANDLE b)\n{\n#if _DEBUG\n printf(\"DirectDraw::WaitForVerticalBlank(This=%p, ...)\\n\", This);\n#endif\n return DD_OK;\n}\n\nHRESULT __stdcall ddraw_QueryInterface(IDirectDrawImpl *This, REFIID riid, void **obj)\n{\n printf(\"DirectDraw::QueryInterface(This=%p, riid=%08X, obj=%p)\\n\", This, (unsigned int)riid, obj);\n\n *obj = This;\n\n return S_OK;\n}\n\nULONG __stdcall ddraw_AddRef(IDirectDrawImpl *This)\n{\n printf(\"DirectDraw::AddRef(This=%p)\\n\", This);\n\n This->Ref++;\n\n return This->Ref;\n}\n\nULONG __stdcall ddraw_Release(IDirectDrawImpl *This)\n{\n printf(\"DirectDraw::Release(This=%p)\\n\", This);\n\n This->Ref--;\n\n if(This->Ref == 0)\n {\n if (This->hWnd && This->renderer == render_dummy_main)\n {\n PostMessage(This->hWnd, WM_USER, 0, 0);\n }\n\n if(This->render.run)\n {\n EnterCriticalSection(&This->cs);\n This->render.run = FALSE;\n ReleaseSemaphore(ddraw->render.sem, 1, NULL);\n LeaveCriticalSection(&This->cs);\n\n WaitForSingleObject(This->render.thread, INFINITE);\n This->render.thread = NULL;\n }\n\n if(This->render.hDC)\n {\n ReleaseDC(This->hWnd, This->render.hDC);\n This->render.hDC = NULL;\n }\n\n if(This->render.ev)\n {\n CloseHandle(This->render.ev);\n ddraw->render.ev = NULL;\n }\n\n if(This->real_dll)\n {\n FreeLibrary(This->real_dll);\n }\n\n DeleteCriticalSection(&This->cs);\n\n /* restore old wndproc, subsequent ddraw creation will otherwise fail */\n SetWindowLong(This->hWnd, GWL_WNDPROC, (LONG)This->WndProc);\n HeapFree(GetProcessHeap(), 0, This);\n ddraw = 
NULL;\n return 0;\n }\n\n return This->Ref;\n}\n\nstruct IDirectDrawImplVtbl iface =\n{\n /* IUnknown */\n ddraw_QueryInterface,\n ddraw_AddRef,\n ddraw_Release,\n /* IDirectDrawImpl */\n ddraw_Compact,\n ddraw_CreateClipper,\n ddraw_CreatePalette,\n ddraw_CreateSurface,\n ddraw_DuplicateSurface,\n ddraw_EnumDisplayModes,\n ddraw_EnumSurfaces,\n ddraw_FlipToGDISurface,\n ddraw_GetCaps,\n ddraw_GetDisplayMode,\n ddraw_GetFourCCCodes,\n ddraw_GetGDISurface,\n ddraw_GetMonitorFrequency,\n ddraw_GetScanLine,\n ddraw_GetVerticalBlankStatus,\n ddraw_Initialize,\n ddraw_RestoreDisplayMode,\n ddraw_SetCooperativeLevel,\n ddraw_SetDisplayMode,\n ddraw_WaitForVerticalBlank\n};\n\nint stdout_open = 0;\nHRESULT WINAPI DirectDrawCreate(GUID FAR* lpGUID, LPDIRECTDRAW FAR* lplpDD, IUnknown FAR* pUnkOuter) \n{\n#if _DEBUG\n if(!stdout_open)\n {\n freopen(\"stdout.txt\", \"w\", stdout);\n setvbuf(stdout, NULL, _IONBF, 0);\n stdout_open = 1;\n }\n#endif\n\n printf(\"DirectDrawCreate(lpGUID=%p, lplpDD=%p, pUnkOuter=%p)\\n\", lpGUID, lplpDD, pUnkOuter);\n\n if(ddraw)\n {\n /* FIXME: check the calling module before passing the call! 
*/\n return ddraw->DirectDrawCreate(lpGUID, lplpDD, pUnkOuter);\n\n /*\n printf(\" returning DDERR_DIRECTDRAWALREADYCREATED\\n\");\n return DDERR_DIRECTDRAWALREADYCREATED;\n */\n } \n\n IDirectDrawImpl *This = (IDirectDrawImpl *)HeapAlloc(GetProcessHeap(), HEAP_ZERO_MEMORY, sizeof(IDirectDrawImpl));\n This->lpVtbl = &iface;\n printf(\" This = %p\\n\", This);\n *lplpDD = (LPDIRECTDRAW)This;\n This->Ref = 0;\n ddraw_AddRef(This);\n\n ddraw = This;\n\n This->real_dll = LoadLibrary(\"system32\\\\ddraw.dll\");\n if(!This->real_dll)\n {\n ddraw_Release(This);\n return DDERR_GENERIC;\n }\n\n This->DirectDrawCreate = (HRESULT WINAPI (*)(GUID FAR*, LPDIRECTDRAW FAR*, IUnknown FAR*))GetProcAddress(This->real_dll, \"DirectDrawCreate\");\n\n if(!This->DirectDrawCreate)\n {\n ddraw_Release(This);\n return DDERR_GENERIC;\n }\n\n InitializeCriticalSection(&This->cs);\n This->render.ev = CreateEvent(NULL, TRUE, FALSE, NULL);\n This->render.sem = CreateSemaphore(NULL, 0, 1, NULL);\n\n /* load configuration options from ddraw.ini */\n char cwd[MAX_PATH];\n char ini_path[MAX_PATH];\n char tmp[256];\n GetCurrentDirectoryA(sizeof(cwd), cwd);\n snprintf(ini_path, sizeof(ini_path), \"%s\\\\ddraw.ini\", cwd);\n\n if(GetFileAttributes(ini_path) == 0xFFFFFFFF)\n {\n FILE *fh = fopen(ini_path, \"w\");\n fputs(\n \"[ddraw]\\n\"\n \"; width and height of the window, defaults to the size game requests\\r\\n\"\n \"width=0\\n\"\n \"height=0\\n\"\n \"; bits per pixel, possible values: 16, 24 and 32, 0 = auto\\n\"\n \"bpp=0\\n\"\n \"windowed=true\\n\"\n \"; show window borders in windowed mode\\n\"\n \"border=true\\n\"\n \"; use letter- or windowboxing to make a best fit (GDI only!)\\n\"\n \"boxing=false\\n\"\n \"; real rendering rate, -1 = screen rate, 0 = unlimited, n = cap\\n\"\n \"maxfps=0\\n\"\n \"; vertical synchronization, enable if you get tearing (OpenGL only)\\n\"\n \"vsync=false\\n\"\n \"; scaling filter, nearest = sharp, linear = smooth (OpenGL only)\\n\"\n \"filter=nearest\\n\"\n \"; 
automatic mouse sensitivity scaling\\n\"\n \"adjmouse=false\\n\"\n \"; manual sensitivity scaling, 0 = disabled, 0.5 = half, 1.0 = normal\\n\"\n \"sensitivity=0.0\\n\"\n \"; enable C&C/RA mouse hack\\n\"\n \"mhack=true\\n\"\n \"; enable C&C video resize hack, auto = auto-detect game, true = forced, false = disabled (OpenGL only)\\n\"\n \"vhack=false\\n\"\n \"; switch between OpenGL (opengl) and software (gdi) renderers, latter supports less features but might be faster depending on the GPU\\n\"\n \"renderer=gdi\\n\"\n \"; force CPU0 affinity, avoids crashes with RA, *might* have a performance impact\\n\"\n \"singlecpu=true\\n\"\n , fh);\n fclose(fh);\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"windowed\", \"TRUE\", tmp, sizeof(tmp), ini_path);\n if (tolower(tmp[0]) == 'n' || tolower(tmp[0]) == 'f' || tolower(tmp[0]) == 'd' || tmp[0] == '0')\n {\n This->windowed = FALSE;\n }\n else\n {\n This->windowed = TRUE;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"border\", \"TRUE\", tmp, sizeof(tmp), ini_path);\n if (tolower(tmp[0]) == 'n' || tolower(tmp[0]) == 'f' || tolower(tmp[0]) == 'd' || tmp[0] == '0')\n {\n This->border = FALSE;\n }\n else\n {\n This->border = TRUE;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"boxing\", \"FALSE\", tmp, sizeof(tmp), ini_path);\n if (tolower(tmp[0]) == 'n' || tolower(tmp[0]) == 'f' || tolower(tmp[0]) == 'd' || tmp[0] == '0')\n {\n This->boxing = FALSE;\n }\n else\n {\n This->boxing = TRUE;\n }\n\n This->render.maxfps = GetPrivateProfileIntA(\"ddraw\", \"maxfps\", 0, ini_path);\n This->render.width = GetPrivateProfileIntA(\"ddraw\", \"width\", 0, ini_path);\n This->render.height = GetPrivateProfileIntA(\"ddraw\", \"height\", 0, ini_path);\n\n This->render.bpp = GetPrivateProfileIntA(\"ddraw\", \"bpp\", 32, ini_path);\n if (This->render.bpp != 16 && This->render.bpp != 24 && This->render.bpp != 32)\n {\n This->render.bpp = 0;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"filter\", tmp, tmp, sizeof(tmp), ini_path);\n if 
(tolower(tmp[0]) == 'l' || tolower(tmp[3]) == 'l')\n {\n This->render.filter = 1;\n }\n else\n {\n This->render.filter = 0;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"adjmouse\", \"FALSE\", tmp, sizeof(tmp), ini_path);\n if (tolower(tmp[0]) == 'y' || tolower(tmp[0]) == 't' || tolower(tmp[0]) == 'e' || tmp[0] == '1')\n {\n This->adjmouse = TRUE;\n }\n else\n {\n This->adjmouse = FALSE;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"mhack\", \"TRUE\", tmp, sizeof(tmp), ini_path);\n if (tolower(tmp[0]) == 'y' || tolower(tmp[0]) == 't' || tolower(tmp[0]) == 'e' || tmp[0] == '1')\n {\n This->mhack = TRUE;\n }\n else\n {\n This->mhack = FALSE;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"devmode\", \"FALSE\", tmp, sizeof(tmp), ini_path);\n if (tolower(tmp[0]) == 'y' || tolower(tmp[0]) == 't' || tolower(tmp[0]) == 'e' || tmp[0] == '1')\n {\n This->devmode = TRUE;\n This->mhack = FALSE;\n }\n else\n {\n This->devmode = FALSE;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"vsync\", \"FALSE\", tmp, sizeof(tmp), ini_path);\n if (tolower(tmp[0]) == 'y' || tolower(tmp[0]) == 't' || tolower(tmp[0]) == 'e' || tmp[0] == '1')\n {\n This->vsync = TRUE;\n }\n else\n {\n This->vsync = FALSE;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"sensitivity\", \"0\", tmp, sizeof(tmp), ini_path);\n This->sensitivity = strtof(tmp, NULL);\n\n GetPrivateProfileStringA(\"ddraw\", \"vhack\", \"false\", tmp, sizeof(tmp), ini_path);\n if (tolower(tmp[0]) == 'y' || tolower(tmp[0]) == 't' || tolower(tmp[0]) == 'e' || tmp[0] == '1')\n {\n This->vhack = 2;\n }\n else if(tolower(tmp[0]) == 'a')\n {\n This->vhack = 1;\n }\n else\n {\n This->vhack = 0;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"renderer\", \"gdi\", tmp, sizeof(tmp), ini_path);\n if(tolower(tmp[0]) == 'd' || tolower(tmp[0]) == 'd')\n {\n printf(\"DirectDrawCreate: Using dummy renderer\\n\");\n This->renderer = render_dummy_main;\n }\n else if(tolower(tmp[0]) == 's' || tolower(tmp[0]) == 'g')\n {\n printf(\"DirectDrawCreate: Using 
software renderer\\n\");\n This->renderer = render_soft_main;\n }\n else\n {\n printf(\"DirectDrawCreate: Using OpenGL renderer\\n\");\n This->renderer = render_main;\n }\n\n GetPrivateProfileStringA(\"ddraw\", \"singlecpu\", \"true\", tmp, sizeof(tmp), ini_path);\n if (tolower(tmp[0]) == 'y' || tolower(tmp[0]) == 't' || tolower(tmp[0]) == 'e' || tmp[0] == '1')\n {\n printf(\"DirectDrawCreate: Setting CPU0 affinity\\n\");\n SetProcessAffinityMask(GetCurrentProcess(), 1);\n }\n\n /* last minute check for cnc-plugin */\n if (GetEnvironmentVariable(\"DDRAW_WINDOW\", tmp, sizeof(tmp)) > 0)\n {\n This->hWnd = (HWND)atoi(tmp);\n This->renderer = render_dummy_main;\n This->windowed = TRUE;\n\n if (GetEnvironmentVariable(\"DDRAW_WIDTH\", tmp, sizeof(tmp)) > 0)\n {\n This->render.width = atoi(tmp);\n }\n\n if (GetEnvironmentVariable(\"DDRAW_HEIGHT\", tmp, sizeof(tmp)) > 0)\n {\n This->render.height = atoi(tmp);\n }\n\n printf(\"DirectDrawCreate: Detected cnc-plugin at window %08X in %dx%d\\n\", (unsigned int)This->hWnd, This->render.width, This->render.height);\n }\n\n\n return DD_OK;\n}\n", "render.c": "/*\n * Copyright (c) 2010 Toni Spets \n *\n * Permission to use, copy, modify, and distribute this software for any\n * purpose with or without fee is hereby granted, provided that the above\n * copyright notice and this permission notice appear in all copies.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES\n * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF\n * MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR\n * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES\n * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN\n * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF\n * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.\n */\n\n/*\nEdits by Ben Lankamp\n\nAdded pillar box rendering for OpenGL\n*/\n#include \n#include \n\n#include \"main.h\"\n#include \"surface.h\"\n\n#define CUTSCENE_WIDTH 640\n#define CUTSCENE_HEIGHT 400\n\n#define ASPECT_RATIO 4/3\n\nBOOL detect_cutscene();\n\nDWORD WINAPI render_main(void)\n{\n int i,j,prevRow,nextRow;\n HGLRC hRC;\n\n // fixed: texture not square but ASPECT RATIO scaled\n int tex_width = ddraw->width > 1024 ? ddraw->width : 1024;\n int tex_height = ddraw->height > 768 ? ddraw->height : 768;\n float scale_w = 1.0f;\n float scale_h = 1.0f;\n int *tex = HeapAlloc(GetProcessHeap(), HEAP_ZERO_MEMORY, tex_width * tex_height * sizeof(int));\n\n hRC = wglCreateContext( ddraw->render.hDC );\n wglMakeCurrent( ddraw->render.hDC, hRC );\n\n char *glext = (char *)glGetString(GL_EXTENSIONS);\n\n if(glext && strstr(glext, \"WGL_EXT_swap_control\"))\n {\n BOOL (APIENTRY *wglSwapIntervalEXT)(int) = (BOOL (APIENTRY *)(int))wglGetProcAddress(\"wglSwapIntervalEXT\");\n if(wglSwapIntervalEXT)\n {\n if(ddraw->vsync)\n {\n wglSwapIntervalEXT(1);\n }\n else\n {\n wglSwapIntervalEXT(0);\n }\n }\n }\n\n DWORD tick_start = 0;\n DWORD tick_end = 0;\n DWORD frame_len = 0;\n\n if(ddraw->render.maxfps < 0)\n {\n ddraw->render.maxfps = ddraw->mode.dmDisplayFrequency;\n }\n\n if(ddraw->render.maxfps > 0)\n {\n frame_len = 1000.0f / ddraw->render.maxfps;\n }\n\n glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, tex_width, tex_height, 0, GL_RGBA, GL_UNSIGNED_BYTE, tex);\n\n // define screen width and height\n // define projected width and height (aspect ratio preserved)\n int screenWidth = ddraw->render.width;\n int screenHeight = ddraw->render.height;\n\n // 
define projection width and height depending on the aspect ratio\n // this effectively sets a pillar box view\n int projectedHeight = screenHeight;\n int projectedWidth = projectedHeight * ASPECT_RATIO;\n int projectedLeft, projectedTop;\n\n if(ddraw->boxing)\n {\n projectedLeft = (screenWidth - projectedWidth) / 2;\n projectedTop = (screenHeight - projectedHeight) / 2;\n\n glViewport(projectedLeft, projectedTop, projectedWidth, projectedHeight);\n }\n else\n {\n projectedWidth = screenWidth;\n projectedHeight = screenHeight;\n glViewport(0, 0, screenWidth, screenHeight);\n }\n\n if(ddraw->render.filter)\n {\n glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);\n glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);\n }\n else\n {\n glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_NEAREST);\n glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_NEAREST);\n }\n\n glEnable(GL_TEXTURE_2D);\n\t\n\ttimeBeginPeriod(1);\n\t\n\n\n while(ddraw->render.run && WaitForSingleObject(ddraw->render.sem, INFINITE) != WAIT_FAILED)\n {\n\t\tscale_w = (float)ddraw->width/tex_width;\n\t\tscale_h = (float)ddraw->height/tex_height;\n\t\n if(ddraw->render.maxfps > 0)\n {\n tick_start = GetTickCount();\n }\n\n /* convert ddraw surface to opengl texture */\n EnterCriticalSection(&ddraw->cs);\n\n if(ddraw->primary && ddraw->primary->palette)\n {\n if(ddraw->vhack && detect_cutscene())\n {\n scale_w *= (float)CUTSCENE_WIDTH / ddraw->width;\n scale_h *= (float)CUTSCENE_HEIGHT / ddraw->height;\n\n if (ddraw->cursorclip.width != CUTSCENE_WIDTH || ddraw->cursorclip.height != CUTSCENE_HEIGHT)\n {\n ddraw->cursorclip.width = CUTSCENE_WIDTH;\n ddraw->cursorclip.height = CUTSCENE_HEIGHT;\n ddraw->cursor.x = CUTSCENE_WIDTH / 2;\n ddraw->cursor.y = CUTSCENE_HEIGHT / 2;\n }\n }\n else\n {\n if (ddraw->cursorclip.width != ddraw->width || ddraw->cursorclip.height != ddraw->height)\n {\n ddraw->cursorclip.width = ddraw->width;\n ddraw->cursorclip.height = ddraw->height;\n 
ddraw->cursor.x = ddraw->width / 2;\n ddraw->cursor.y = ddraw->height / 2;\n }\n }\n\n // regular paint\n for(i=0; iheight; i++)\n {\n for(j=0; jwidth; j++)\n {\n tex[i*ddraw->width+j] = ddraw->primary->palette->data_bgr[((unsigned char *)ddraw->primary->surface)[i*ddraw->primary->lPitch + j*ddraw->primary->lXPitch]];\n }\n }\n\n // poor man's deinterlace\n if(ddraw->vhack && detect_cutscene())\n {\n for(i = 1; i < (ddraw->height - 1); i += 2)\n {\n for(j=0; jwidth; j++)\n {\n if(tex[i*ddraw->width+j] == 0)\n {\n prevRow = tex[(i-1)*ddraw->width+j];\n nextRow = tex[(i+1)*ddraw->width+j];\n\n tex[i*ddraw->width+j] = (prevRow+nextRow)/2;\n }\n }\n }\n }\n }\n LeaveCriticalSection(&ddraw->cs);\n\n glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, ddraw->width, ddraw->height, GL_RGBA, GL_UNSIGNED_BYTE, tex);\n\n glBegin(GL_TRIANGLE_FAN);\n glTexCoord2f(0,0); glVertex2f(-1, 1);\n glTexCoord2f(scale_w,0); glVertex2f( 1, 1);\n glTexCoord2f(scale_w,scale_h); glVertex2f( 1, -1);\n glTexCoord2f(0,scale_h); glVertex2f(-1, -1);\n glEnd();\n\t\t\n\t\tSwapBuffers(ddraw->render.hDC); \n\n if((ddraw->render.maxfps > 0))\n { \n\t\t\ttick_end = GetTickCount();\n\t\t\t\n if(tick_end - tick_start < frame_len)\n {\n\t\t\t\tSleep( frame_len - (tick_end - tick_start));\n }\n }\n\n SetEvent(ddraw->render.ev);\n }\n\ttimeEndPeriod(1);\n\t\t\n HeapFree(GetProcessHeap(), 0, tex);\n\n wglMakeCurrent(NULL, NULL);\n wglDeleteContext(hRC);\n\n return 0;\n}\n", "render_soft.c": "/*\n * Copyright (c) 2011 Toni Spets \n *\n * Permission to use, copy, modify, and distribute this software for any\n * purpose with or without fee is hereby granted, provided that the above\n * copyright notice and this permission notice appear in all copies.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES\n * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF\n * MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR\n * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES\n * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN\n * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF\n * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.\n */\n\n#include \n#include \n\n#include \"main.h\"\n#include \"surface.h\"\n\n#define CUTSCENE_WIDTH 640\n#define CUTSCENE_HEIGHT 400\n\nstatic unsigned char getPixel(int x, int y)\n{\n\treturn ((unsigned char *)ddraw->primary->surface)[y*ddraw->primary->lPitch + x*ddraw->primary->lXPitch];\n}\n\nint* InMovie = (int*)0x00665F58;\nint* IsVQA640 = (int*)0x0065D7BC; \nBYTE* ShouldStretch = (BYTE*)0x00607D78;\n\nBOOL detect_cutscene()\n{\n\tif(ddraw->width <= CUTSCENE_WIDTH || ddraw->height <= CUTSCENE_HEIGHT)\n\t\treturn FALSE;\n\t\t\n\tif (ddraw->isredalert == TRUE)\n\t{\n\t\tif ((*InMovie && !*IsVQA640) || *ShouldStretch)\n\t\t{\n\t\t\treturn TRUE;\n\t\t}\n\t\treturn FALSE;\n\t}\n\n\treturn getPixel(CUTSCENE_WIDTH + 1, 0) == 0 || getPixel(CUTSCENE_WIDTH + 5, 1) == 0 ? 
TRUE : FALSE;\t\n}\n\nDWORD WINAPI render_soft_main(void)\n{\n PBITMAPINFO bmi = HeapAlloc(GetProcessHeap(), HEAP_ZERO_MEMORY, sizeof(BITMAPINFOHEADER) + sizeof(RGBQUAD) * 256);\n\n bmi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);\n bmi->bmiHeader.biWidth = ddraw->width;\n bmi->bmiHeader.biHeight = -ddraw->height;\n bmi->bmiHeader.biPlanes = 1;\n bmi->bmiHeader.biBitCount = ddraw->bpp;\n bmi->bmiHeader.biCompression = BI_RGB;\n\n DWORD dst_top = 0;\n DWORD dst_left = 0;\n DWORD dst_width = ddraw->render.width;\n DWORD dst_height = ddraw->render.height;\n\n DWORD tick_start = 0;\n DWORD tick_end = 0;\n DWORD frame_len = 0;\n\t\n\ttimeBeginPeriod(1);\n\n if (ddraw->boxing)\n {\n dst_width = ddraw->width;\n dst_height = ddraw->height;\n\n /* test if we can double scale the window */\n if (ddraw->width * 2 <= ddraw->render.width && ddraw->height * 2 <= ddraw->render.height)\n {\n dst_width *= 2;\n dst_height *= 2;\n }\n\n dst_top = ddraw->render.height / 2 - dst_height / 2;\n dst_left = ddraw->render.width / 2 - dst_width / 2;\n }\n\n if(ddraw->render.maxfps < 0)\n {\n ddraw->render.maxfps = ddraw->mode.dmDisplayFrequency;\n }\n\n if(ddraw->render.maxfps > 0)\n {\n frame_len = 1000.0f / ddraw->render.maxfps;\n }\n\n while (ddraw->render.run && WaitForSingleObject(ddraw->render.sem, INFINITE) != WAIT_FAILED)\n {\n if(ddraw->render.maxfps > 0)\n {\n tick_start = GetTickCount();\n }\n\t\t\n\t\t EnterCriticalSection(&ddraw->cs);\n\n if (ddraw->primary && (ddraw->primary->palette || ddraw->bpp == 16))\n {\n if (ddraw->primary->palette && ddraw->primary->palette->data_rgb == NULL)\n {\n ddraw->primary->palette->data_rgb = &bmi->bmiColors[0];\n }\n\n if ((ddraw->render.width != ddraw->width || ddraw->render.height != ddraw->height) && !(ddraw->vhack && detect_cutscene()) )\n {\n StretchDIBits(ddraw->render.hDC, dst_left, dst_top, dst_width, dst_height, 0, 0, ddraw->width, ddraw->height, ddraw->primary->surface, bmi, DIB_RGB_COLORS, SRCCOPY);\n }\n\t\t\telse if 
(!(ddraw->vhack && detect_cutscene()))\n\t\t\t{\n\t\t\t\tSetDIBitsToDevice(ddraw->render.hDC, 0, 0, ddraw->width, ddraw->height, 0, 0, 0, ddraw->height, ddraw->primary->surface, bmi, DIB_RGB_COLORS);\n\t\t\t}\n\n }\n\t\tif (ddraw->vhack && ddraw->primary && detect_cutscene()) // for vhack\n\t\t{\n\t\t\tif (ddraw->primary->palette && ddraw->primary->palette->data_rgb == NULL)\n {\n ddraw->primary->palette->data_rgb = &bmi->bmiColors[0];\n }\n\t\t\t// for 800 x 600:\n\t\t\t//StretchDIBits(ddraw->render.hDC, 0, 0, ddraw->render.width, ddraw->render.height, 0, 200, CUTSCENE_WIDTH, CUTSCENE_HEIGHT, ddraw->primary->surface, bmi, DIB_RGB_COLORS, SRCCOPY);\n\t\t\t\n\t\t\t\t\t\tStretchDIBits(ddraw->render.hDC, 0, 0, ddraw->render.width, ddraw->render.height, 0, ddraw->height-400, CUTSCENE_WIDTH, CUTSCENE_HEIGHT, ddraw->primary->surface, bmi, DIB_RGB_COLORS, SRCCOPY);\n\n\t\t\t\n\t\t\tif (ddraw->primary->palette && (ddraw->cursorclip.width != CUTSCENE_WIDTH || ddraw->cursorclip.height != CUTSCENE_HEIGHT))\n\t\t\t{\n\t\t\t\tddraw->cursorclip.width = CUTSCENE_WIDTH;\n\t\t\t\tddraw->cursorclip.height = CUTSCENE_HEIGHT;\n\t\t\t\tddraw->cursor.x = CUTSCENE_WIDTH / 2;\n\t\t\t\tddraw->cursor.y = CUTSCENE_HEIGHT / 2;\n\t\t\t}\n\t\t}\n\t\telse if(ddraw->primary && ddraw->primary->palette && (ddraw->cursorclip.width != ddraw->width || ddraw->cursorclip.height != ddraw->height))\n\t\t{\n\t\t\tddraw->cursorclip.width = ddraw->width;\n\t\t\tddraw->cursorclip.height = ddraw->height;\n\t\t\tddraw->cursor.x = ddraw->width / 2;\n\t\t\tddraw->cursor.y = ddraw->height / 2;\n\t\t}\n\n LeaveCriticalSection(&ddraw->cs);\n\n if((ddraw->render.maxfps > 0) && !detect_cutscene())\n {\n tick_end = GetTickCount();\n\n if(tick_end - tick_start < frame_len)\n {\n Sleep( frame_len - (tick_end - tick_start) + 1);\n }\n }\n SetEvent(ddraw->render.ev);\n }\n\ttimeEndPeriod(1);\n\n HeapFree(GetProcessHeap(), 0, bmi);\n\n return TRUE;\n}\n"}}
-{"repo": "krisleech/jQuery-Character-Counter", "pr_number": 1, "title": "add a class if character limit exceed's ", "state": "closed", "merged_at": "2013-03-04T18:36:56Z", "additions": 37, "deletions": 21, "files_changed": ["counter.jquery.js", "index.html"], "files_before": {"counter.jquery.js": "(function($){\n $.fn.counter = function() {\n return this.each(function() {\n max_length = parseInt($(this).attr('data-max-length'));\n\n var length = $(this).val().length; \n $(this).parent().find('.counter_label').html(max_length-length + ' characters left');\n // bind on key up event\n $(this).keyup(function(){\n // calc length and truncate if needed\n var new_length = $(this).val().length;\n if (new_length > max_length-1) {\n $(this).val($(this).val().substring(0, options.max_length));\n }\n // update visual counter\n $(this).parent().find('.counter_label').html(max_length-new_length + ' characters left');\n });\n });\n };\n})(jQuery);\n\n\n", "index.html": "\n\n\n\n\t\n\tindex\n\t\n\t\n\t\n\t\n\t\n\n\n\t\n\t\n\t\n\t\n\n\n"}, "files_after": {"counter.jquery.js": "(function ($) {\n $.fn.counter = function (options) {\n var defaults={\n limitExceedClass:''\n };\n var options=$.extend({},defaults,options);\n return this.each(function () {\n max_length = parseInt($(this).attr('data-max-length'));\n\n var length = $(this).val().length;\n $(this).parent().find('.counter_label').html(max_length - length + ' characters left');\n // bind on key up event\n $(this).keyup(function () {\n // calc length and truncate if needed\n var new_length = $(this).val().length;\n if (new_length > max_length - 1) {\n $(this).parent().find('.counter_label').addClass(options.limitExceedClass);\n }\n else {\n $(this).parent().find('.counter_label').removeClass(options.limitExceedClass);\n }\n // update visual counter\n $(this).parent().find('.counter_label').html(max_length - new_length + ' characters left');\n });\n });\n };\n})(jQuery);\n$(document).ready(function () {\n 
$('textarea').counter();\n\n})\n\n\n", "index.html": "\n\n\n\n\t\n\tindex\n\t\n\t\n\t\n\t\n\t\n\t\n\n\n\t\n\t\n\t\n\t\n\n\n"}}
-{"repo": "mgalgs/termship", "pr_number": 1, "title": "Use no-more-secrets", "state": "closed", "merged_at": "2016-09-19T09:33:24Z", "additions": 25, "deletions": 17, "files_changed": ["screen.c"], "files_before": {"screen.c": "/* -*- c-basic-offset: 2 -*- */\n/**\n * This file contains all the ui routines.\n */\n\n\n#include \n#include \n#include \n#include \n#include \n#include \n#include \"gamepieces.h\"\n#include \"connection.h\"\n#include \"screen.h\"\n#include \"log.h\"\n#include \"common.h\"\n\n#define MAX_NAME 100\n\nextern Ship Shipset[];\nextern Ship PeerShipset[];\nchar global_user_name[MAX_NAME];\nchar peer_user_name[MAX_NAME];\nint user_mode;\ntypedef enum SHOT_SPOT {\n UNTOUCHED=0,\n MISS,\n HIT\n} SHOT_SPOT;\nSHOT_SPOT player_shots[BOARD_SIZE][BOARD_SIZE]; /* 1=hit, 2=miss */\nSHOT_SPOT peer_shots[BOARD_SIZE][BOARD_SIZE]; /* 1=hit, 2=miss */\n\n\nWINDOW *player_win;\nWINDOW *opponent_win;\nWINDOW *status_win;\n\nvoid place_hit_or_mis(WINDOW * win,int mesg, int x, int y, bool was_peer_shot)\n{\n //-2game -1 hit sink 1hit 0miss\n //deal with hits first\n\n if ((mesg == -2) || (mesg == -1) || (mesg == 1)) {\n wattron(win,COLOR_PAIR(4));\n mvwprintw(win, y+2, x*2+3,\"#\");\n wattroff(win,COLOR_PAIR(4));\n wrefresh(win);\n if (was_peer_shot)\n peer_shots[x][y] = HIT;\n else\n player_shots[x][y] = HIT;\n } else { // miss\n wattron(win,COLOR_PAIR(3));\n mvwprintw(win, y+2, x*2+3,\"@\");\n wattroff(win,COLOR_PAIR(3));\n wrefresh(win);\n if (was_peer_shot)\n peer_shots[x][y] = MISS;\n else\n player_shots[x][y] = MISS;\n }\n}\n\n/**\n * Display battlefields after exchanging boards.\n */\nvoid show_battlefields()\n{\n /* dump battlefields: */\n if (user_mode == SERVER_MODE) {\n write_to_log(\"player_shots:\\n\");\n for (int i=0; i 3+startx+20) {\n playerx -=2;\n player_pos.x--;\n move(playery, playerx);\n break;\n }\n break;\n case KEY_RIGHT:\n if (playerx < -3+startx+20+width) {\n playerx +=2;\n player_pos.x++;\n move(playery, playerx);\n break; \n }\n 
break;\n case KEY_UP:\n if (playery > 2+starty) {\n --playery;\n --player_pos.y;\n move(playery, playerx);\n break;\n }\n break;\n case KEY_DOWN:\n if (playery < starty+height-2) {\n ++playery;\n ++player_pos.y;\n move(playery, playerx);\n break; \n }\n case 10:\n case KEY_ENTER:\n if (player_shots[player_pos.x][player_pos.y] == UNTOUCHED) {\n *x = player_pos.x;\n *y = player_pos.y;\n return;\n } else {\n move(playery, playerx);\n }\n break;\n \n }\n } \n}\n\nvoid display_boards(void)\n{\n int startx, starty, width, height; \n int stat_width, stat_height;\n\n char players_grid[BOARD_SIZE][BOARD_SIZE];\n\n int f, h = 0;\n char t;\n int i;\n stat_height= 5;\n stat_width=50;\n\n keypad(stdscr, TRUE); \n height = 3+BOARD_SIZE; \n width = 14+BOARD_SIZE; \n starty = (LINES - height) / 2; \n startx = (COLS - width) / 2; \n clear();\n refresh(); \n\n player_win = newwin(height, width, starty, startx+20); \n box(player_win, 0, 0);\n wrefresh(player_win);\n\n opponent_win = newwin(height, width, starty, startx-20);\n box(opponent_win, 0, 0);\n wrefresh(opponent_win);\n\n status_win = newwin(stat_height, stat_width, starty+13, startx-20);\n\n create_grid(players_grid, Shipset);\n\n clear();\n refresh();\n\n mvprintw(starty-1, startx-15, global_user_name);\n mvwprintw(opponent_win, 1,1,\" A B C D E F G H I J\");\n wattron(opponent_win,COLOR_PAIR(2));\n mvwprintw(opponent_win, 1, 1, \" \");\n wattroff(opponent_win,COLOR_PAIR(2));\n\n for (h = 0; h -1);\n\n sprintf(msg, \"Game over! You %s!\\nPress any key to view battlefields.\", win_status ? \"won\" : \"lost\");\n show_message_box(msg);\n getch();\n exchange_shipsets(sock);\n show_battlefields();\n}\n\n\nvoid title_screen()\n{\n char *picture[] = {\n \" # # ( )\",\n \" ___#_#___|__\",\n \" _ |____________| _\",\n \" _=====| | | | | |==== _\",\n \" =====| |.---------------------------. | |====\",\n \" <--------------------' . . . . . . . . 
'--------------/\",\n \" \\\\ /\",\n \" \\\\_______________________________________________WWS_________/\",\n \" wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww\",\n \"wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww\",\n \" wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww\",\n NULL\n };\n\n print_picture(stdscr, picture);\n\n /* int numsquiggles = 8; */\n /* int numreps = 2; */\n /* int framespeed = 60000; */\n /* do a little \"animation\" */\n /* for (int i=0; i0; --j) { */\n /* char msg[100]; */\n /* int s=0; */\n /* for (int k=0; k largest_line_length\n ? i-prev_newline_index\n : largest_line_length;\n prev_newline_index = i;\n if (string[i] == '\\n')\n height++;\n /* sprintf(msg, \"found newline or null at %d. now height is %d largest line is %d\\n\", i, height, largest_line_length); */\n /* write_to_log(msg); */\n }\n }\n largest_line_length = largest_line_length == 0\n ? strlen(string)\n : largest_line_length;\n /* sprintf(msg, \"At the end, now height is %d largest line is %d\\n\", height, largest_line_length); */\n /* write_to_log(msg); */\n\n width = largest_line_length + 6;\n\n\n /* if there's an existing message box up and this string is a\n different length than the last one, we need to recompute the\n width, so we just hide the message box. 
*/\n if (*last_box_width != width || *last_box_height != height) {\n sprintf(msg, \"implicit hide of the message box because %d != %d || %d != %d\\n\",\n *last_box_width, width, *last_box_height, height);\n write_to_log(msg);\n hide_message_box_win(win, pan);\n }\n\n if (*win == NULL) {\n *win = newwin(height, width,\n (LINES-height)/2,\n (COLS-width)/2);\n sprintf(msg, \"created new win at *win %p\\n\", *win);\n write_to_log(msg);\n }\n if (*pan == NULL) {\n *pan = new_panel(*win);\n sprintf(msg, \"created new *pan at %p\\n\", *pan);\n write_to_log(msg);\n }\n wattron(*win, BLUE_ON_BLACK);\n box(*win, 0, 0);\n wattroff(*win, BLUE_ON_BLACK);\n /* border(186, 186, 205, 205, */\n /* 201, 187, 200, 188); */\n /* border(ls, rs, chtype ts, chtype bs, */\n /* chtype tl, chtype tr, chtype bl, chtype br); */\n\n int current_y = 1;\n wattron(*win, WHITE_ON_RED);\n for (int i=0; i maxlen ? len : maxlen;\n }\n return maxlen;\n}\n\nvoid print_picture(WINDOW *win, char *picture[])\n{\n /* get width of picture */\n int picwidth = get_picture_width(picture);\n int leftoffset = (COLS - picwidth)/2;\n int topoffset = 2;\n for (int i=0; picture[i] != NULL; ++i) {\n mvwprintw(win, topoffset+i, leftoffset, picture[i]);\n }\n}\n\n/**\n * dest should have enough space (at least len) to hold the string.\n */\nvoid get_text_string_from_centered_panel(char const *const prompt, char *dest, int len)\n{\n WINDOW *panel_win;\n PANEL *the_panel;\n int panel_height=6,panel_width;\n /* char *dest = malloc(100); */\n\n int promptlen = strlen(prompt);\n panel_width = MAX(30, promptlen+5);\n\n /* Create the window to hold the panel */\n panel_win = newwin(panel_height,\n panel_width,\n (LINES-panel_height)/2,\n (COLS-panel_width)/2);\n box(panel_win, 0, 0);\n print_in_middle(panel_win, 1,\n 0, panel_width,\n prompt, COLOR_PAIR(6));\n wattron(panel_win, COLOR_PAIR(5));\n mvwhline(panel_win, 3, 2, ' ', panel_width-4);\n curs_set(1); // make cursor visible\n echo();\n mvwgetnstr(panel_win, 3, 2, dest, 
len);\n noecho();\n curs_set(0); // make cursor invisible\n wattroff(panel_win, COLOR_PAIR(5));\n \n /* create the panel from our window */\n the_panel = new_panel(panel_win);\n top_panel(the_panel);\n update_panels();\n doupdate();\n\n del_panel(the_panel);\n update_panels();\n delwin(panel_win);\n doupdate();\n}\n\n\n/**\n * Constructs and returns a new Animation object. This doesn't load\n * the animation. That will be done at play time or you can call\n * load_animation to do it manually. The animation should be free'd by\n * the user.\n */\nAnimation *create_animation(char *loadFile)\n{\n Animation *anim = (Animation *)malloc(sizeof(Animation));\n KINDLY_DIE_IF_NULL(anim);\n anim->isLoaded = false;\n anim->loadFile = (char *) malloc(sizeof(char) * MAX_FILE_LEAF_NAME);\n KINDLY_DIE_IF_NULL(anim->loadFile);\n strcpy(anim->loadFile, loadFile);\n return anim;\n}\n\nvoid destroy_animation(Animation *anim)\n{\n for(int i=0; i < anim->numFrames; ++i) {\n free(anim->frames[i]);\n }\n free(anim->frames);\n free(anim->loadFile);\n free(anim);\n}\n\n\n/**\n * Loads up the animation. 
Make sure you set the `loadFile` attribute\n * of the Animation before calling this function.\n */\nvoid load_animation(Animation *anim)\n{\n FILE *fp;\n char msg[500], loadFileFullPath[MAX_FILE_FULL_PATH];\n char *line, *thisFrame;\n size_t len=0;\n ssize_t read;\n\n sprintf(loadFileFullPath, \"%s/animations/%s\", xstr(TERMSHIP_PATH), anim->loadFile);\n sprintf(msg, \"Loading animation file from %s...\\n\", loadFileFullPath);\n write_to_log(msg);\n\n fp = fopen(loadFileFullPath, \"r\");\n if (fp == NULL) {\n cleanup_ncurses();\n printf(\"couldn't open %s for reading...\\n\", loadFileFullPath);\n exit(EXIT_FAILURE);\n }\n\n /*** read the header lines: ***/\n /* First is the size (in lines) of each of the frames we're about to\n read */\n line = NULL;\n read = getline(&line, &len, fp);\n sscanf(line, \"%d\\n\", &(anim->height));\n free(line);\n sprintf(msg, \"%s has height %d\\n\", loadFileFullPath,\n anim->height);\n write_to_log(msg);\n /* Next is the total number of frames */\n line = NULL;\n read = getline(&line, &len, fp);\n sscanf(line, \"%d\\n\", &(anim->numFrames));\n free(line);\n sprintf(msg, \"%s has %d total frames\\n\", loadFileFullPath, anim->numFrames);\n write_to_log(msg);\n /* Next is the desired frame rate: */\n line = NULL;\n read = getline(&line, &len, fp);\n sscanf(line, \"%d\\n\", &(anim->fps));\n free(line);\n sprintf(msg, \"%s will run at %d fps\\n\", loadFileFullPath, anim->fps);\n write_to_log(msg);\n\n (void)read; /*compiler warnings*/\n\n /* Allocate space for the animation (the frames, not the actual lines quite yet): */\n anim->frames = (char **) malloc(sizeof(char *) * anim->numFrames);\n KINDLY_DIE_IF_NULL(anim->frames);\n thisFrame = (char *) malloc(sizeof(char) * anim->height * MAX_FRAME_WIDTH);\n KINDLY_DIE_IF_NULL(thisFrame);\n\n int max_width = 0;\n for (int i=0; i < anim->numFrames; ++i) {\n bool last_char_was_newline = false;\n int chars_read;\n for (chars_read=0; ; ++chars_read) {\n int ch = fgetc(fp);\n /* sprintf(msg, \"[%d] 
=> %c\\n\", chars_read, (char)ch); */\n /* write_to_log(msg); */\n thisFrame[chars_read] = ch;\n if (ch == '\\n') {\n /* two newlines in a row. next frame. */\n if (last_char_was_newline)\n break;\n last_char_was_newline = true;\n } else {\n last_char_was_newline = false;\n }\n }\n thisFrame[chars_read-1] = '\\0'; /* overwriting the final newline */\n anim->frames[i] = (char *) malloc((sizeof(char) * chars_read)); /* don't need +1 because we truncated the last newline */\n KINDLY_DIE_IF_NULL(anim->frames[i]);\n strcpy(anim->frames[i], thisFrame);\n\n max_width = MAX(chars_read, max_width);\n\n } /*eo for each line*/\n\n free(thisFrame);\n\n anim->width = max_width;\n\n anim->isLoaded = true;\n}\n\n/**\n * Plays the specified animation. Loads it if necessary.\n */\nvoid play_animation\n(Animation *anim, char *subtitle, bool press_key_to_continue, bool hold_at_end)\n{\n static WINDOW *animation_window = NULL;\n static PANEL *animation_panel = NULL;\n static int anim_width;\n static int anim_height;\n char msg[500];\n (void)msg;\n char *hold_message = \"\\n(Press any key to continue)\";\n\n if (!anim->isLoaded) load_animation(anim);\n\n anim_width = anim->width;\n anim_height = anim->height;\n\n if (subtitle != NULL)\n anim_height++;\n\n\n\n for (int i=0; i < anim->numFrames; ++i) {\n char *theframe = anim->frames[i];\n if (subtitle != NULL) {\n theframe = (char *) malloc(strlen(anim->frames[i]) + strlen(subtitle) + 2); /* +2 for extra null and newline */\n KINDLY_DIE_IF_NULL(theframe);\n strcpy(theframe, anim->frames[i]);\n strcat(theframe, \"\\n\");\n strcat(theframe, subtitle);\n }\n\n show_message_box_win(&animation_window, &animation_panel,\n\t\t\t theframe, &anim_width, &anim_height);\n\n if (subtitle != NULL)\n free(theframe);\n\n if (press_key_to_continue) {\n /* no delay if press_key_to_continue is set: */\n nodelay(animation_window, true);\n if (ERR != wgetch(animation_window)) {\n\tnodelay(animation_window, false);\n\treturn;\n }\n nodelay(animation_window, 
false);\n }\n\n /* we assume the show_message_box_win takes 0 time */\n usleep( (1/(float)anim->fps) * 1000000 );\n }\n\n if (hold_at_end) {\n char *hold_frame = (char *) malloc(strlen(anim->frames[anim->numFrames-1]) + strlen(hold_message) + 1);\n KINDLY_DIE_IF_NULL(hold_frame);\n strcpy(hold_frame, anim->frames[anim->numFrames-1]);\n strcat(hold_frame, hold_message);\n show_message_box_win(&animation_window, &animation_panel,\n hold_frame, &anim_width, &anim_height);\n getch();\n free(hold_frame);\n }\n\n hide_message_box_win(&animation_window, &animation_panel);\n}\n\n\nvoid cleanup_ncurses()\n{\n endwin(); /* end curses mode */\n}\n\nvoid kindly_die(char *msg)\n{\n cleanup_ncurses();\n printf(msg);\n exit(-1);\n}\n"}, "files_after": {"screen.c": "/* -*- c-basic-offset: 2 -*- */\n/**\n * This file contains all the ui routines.\n */\n\n\n#include \n#include \n#include \n#include \n#include \n#include \n#include \"gamepieces.h\"\n#include \"connection.h\"\n#include \"screen.h\"\n#include \"log.h\"\n#include \"common.h\"\n#include \"no-more-secrets/src/nms.h\"\n\n#define MAX_NAME 100\n\nextern Ship Shipset[];\nextern Ship PeerShipset[];\nchar global_user_name[MAX_NAME];\nchar peer_user_name[MAX_NAME];\nint user_mode;\ntypedef enum SHOT_SPOT {\n UNTOUCHED=0,\n MISS,\n HIT\n} SHOT_SPOT;\nSHOT_SPOT player_shots[BOARD_SIZE][BOARD_SIZE]; /* 1=hit, 2=miss */\nSHOT_SPOT peer_shots[BOARD_SIZE][BOARD_SIZE]; /* 1=hit, 2=miss */\n\n\nWINDOW *player_win;\nWINDOW *opponent_win;\nWINDOW *status_win;\n\nvoid place_hit_or_mis(WINDOW * win,int mesg, int x, int y, bool was_peer_shot)\n{\n //-2game -1 hit sink 1hit 0miss\n //deal with hits first\n\n if ((mesg == -2) || (mesg == -1) || (mesg == 1)) {\n wattron(win,COLOR_PAIR(4));\n mvwprintw(win, y+2, x*2+3,\"#\");\n wattroff(win,COLOR_PAIR(4));\n wrefresh(win);\n if (was_peer_shot)\n peer_shots[x][y] = HIT;\n else\n player_shots[x][y] = HIT;\n } else { // miss\n wattron(win,COLOR_PAIR(3));\n mvwprintw(win, y+2, x*2+3,\"@\");\n 
wattroff(win,COLOR_PAIR(3));\n wrefresh(win);\n if (was_peer_shot)\n peer_shots[x][y] = MISS;\n else\n player_shots[x][y] = MISS;\n }\n}\n\n/**\n * Display battlefields after exchanging boards.\n */\nvoid show_battlefields()\n{\n /* dump battlefields: */\n if (user_mode == SERVER_MODE) {\n write_to_log(\"player_shots:\\n\");\n for (int i=0; i 3+startx+20) {\n playerx -=2;\n player_pos.x--;\n move(playery, playerx);\n break;\n }\n break;\n case KEY_RIGHT:\n if (playerx < -3+startx+20+width) {\n playerx +=2;\n player_pos.x++;\n move(playery, playerx);\n break; \n }\n break;\n case KEY_UP:\n if (playery > 2+starty) {\n --playery;\n --player_pos.y;\n move(playery, playerx);\n break;\n }\n break;\n case KEY_DOWN:\n if (playery < starty+height-2) {\n ++playery;\n ++player_pos.y;\n move(playery, playerx);\n break; \n }\n case 10:\n case KEY_ENTER:\n if (player_shots[player_pos.x][player_pos.y] == UNTOUCHED) {\n *x = player_pos.x;\n *y = player_pos.y;\n return;\n } else {\n move(playery, playerx);\n }\n break;\n \n }\n } \n}\n\nvoid display_boards(void)\n{\n int startx, starty, width, height; \n int stat_width, stat_height;\n\n char players_grid[BOARD_SIZE][BOARD_SIZE];\n\n int f, h = 0;\n char t;\n int i;\n stat_height= 5;\n stat_width=50;\n\n keypad(stdscr, TRUE); \n height = 3+BOARD_SIZE; \n width = 14+BOARD_SIZE; \n starty = (LINES - height) / 2; \n startx = (COLS - width) / 2; \n clear();\n refresh(); \n\n player_win = newwin(height, width, starty, startx+20); \n box(player_win, 0, 0);\n wrefresh(player_win);\n\n opponent_win = newwin(height, width, starty, startx-20);\n box(opponent_win, 0, 0);\n wrefresh(opponent_win);\n\n status_win = newwin(stat_height, stat_width, starty+13, startx-20);\n\n create_grid(players_grid, Shipset);\n\n clear();\n refresh();\n\n mvprintw(starty-1, startx-15, global_user_name);\n mvwprintw(opponent_win, 1,1,\" A B C D E F G H I J\");\n wattron(opponent_win,COLOR_PAIR(2));\n mvwprintw(opponent_win, 1, 1, \" \");\n 
wattroff(opponent_win,COLOR_PAIR(2));\n\n for (h = 0; h -1);\n\n sprintf(msg, \"Game over! You %s!\\nPress any key to view battlefields.\", win_status ? \"won\" : \"lost\");\n show_message_box(msg);\n getch();\n exchange_shipsets(sock);\n show_battlefields();\n}\n\n\nvoid title_screen()\n{\n char *picture = \n \" # # ( )\\n\"\n \" ___#_#___|__\\n\"\n \" _ |____________| _\\n\"\n \" _=====| | | | | |==== _\\n\"\n \" =====| |.---------------------------. | |====\\n\"\n \" <--------------------' . . . . . . . . '--------------/\\n\"\n \" \\\\ /\\n\"\n \" \\\\_______________________________________________WWS_________/\\n\"\n \" wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww\\n\"\n \"wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww\\n\"\n \" wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww\\n\";\n\n NmsArgs args = INIT_NMSARGS;\n args.src = picture;\n nms_exec(&args);\n clear();\n //print_picture(stdscr, picture);\n\n /* int numsquiggles = 8; */\n /* int numreps = 2; */\n /* int framespeed = 60000; */\n /* do a little \"animation\" */\n /* for (int i=0; i0; --j) { */\n /* char msg[100]; */\n /* int s=0; */\n /* for (int k=0; k largest_line_length\n ? i-prev_newline_index\n : largest_line_length;\n prev_newline_index = i;\n if (string[i] == '\\n')\n height++;\n /* sprintf(msg, \"found newline or null at %d. now height is %d largest line is %d\\n\", i, height, largest_line_length); */\n /* write_to_log(msg); */\n }\n }\n largest_line_length = largest_line_length == 0\n ? strlen(string)\n : largest_line_length;\n /* sprintf(msg, \"At the end, now height is %d largest line is %d\\n\", height, largest_line_length); */\n /* write_to_log(msg); */\n\n width = largest_line_length + 6;\n\n\n /* if there's an existing message box up and this string is a\n different length than the last one, we need to recompute the\n width, so we just hide the message box. 
*/\n if (*last_box_width != width || *last_box_height != height) {\n sprintf(msg, \"implicit hide of the message box because %d != %d || %d != %d\\n\",\n *last_box_width, width, *last_box_height, height);\n write_to_log(msg);\n hide_message_box_win(win, pan);\n }\n\n if (*win == NULL) {\n *win = newwin(height, width,\n (LINES-height)/2,\n (COLS-width)/2);\n sprintf(msg, \"created new win at *win %p\\n\", *win);\n write_to_log(msg);\n }\n if (*pan == NULL) {\n *pan = new_panel(*win);\n sprintf(msg, \"created new *pan at %p\\n\", *pan);\n write_to_log(msg);\n }\n wattron(*win, BLUE_ON_BLACK);\n box(*win, 0, 0);\n wattroff(*win, BLUE_ON_BLACK);\n /* border(186, 186, 205, 205, */\n /* 201, 187, 200, 188); */\n /* border(ls, rs, chtype ts, chtype bs, */\n /* chtype tl, chtype tr, chtype bl, chtype br); */\n\n int current_y = 1;\n wattron(*win, WHITE_ON_RED);\n for (int i=0; i maxlen ? len : maxlen;\n }\n return maxlen;\n}\n\nvoid print_picture(WINDOW *win, char *picture[])\n{\n /* get width of picture */\n int picwidth = get_picture_width(picture);\n int leftoffset = (COLS - picwidth)/2;\n int topoffset = 2;\n for (int i=0; picture[i] != NULL; ++i) {\n mvwprintw(win, topoffset+i, leftoffset, picture[i]);\n }\n}\n\n/**\n * dest should have enough space (at least len) to hold the string.\n */\nvoid get_text_string_from_centered_panel(char const *const prompt, char *dest, int len)\n{\n WINDOW *panel_win;\n PANEL *the_panel;\n int panel_height=6,panel_width;\n /* char *dest = malloc(100); */\n\n int promptlen = strlen(prompt);\n panel_width = MAX(30, promptlen+5);\n\n /* Create the window to hold the panel */\n panel_win = newwin(panel_height,\n panel_width,\n (LINES-panel_height)/2,\n (COLS-panel_width)/2);\n box(panel_win, 0, 0);\n print_in_middle(panel_win, 1,\n 0, panel_width,\n prompt, COLOR_PAIR(6));\n wattron(panel_win, COLOR_PAIR(5));\n mvwhline(panel_win, 3, 2, ' ', panel_width-4);\n curs_set(1); // make cursor visible\n echo();\n mvwgetnstr(panel_win, 3, 2, dest, 
len);\n noecho();\n curs_set(0); // make cursor invisible\n wattroff(panel_win, COLOR_PAIR(5));\n \n /* create the panel from our window */\n the_panel = new_panel(panel_win);\n top_panel(the_panel);\n update_panels();\n doupdate();\n\n del_panel(the_panel);\n update_panels();\n delwin(panel_win);\n doupdate();\n}\n\n\n/**\n * Constructs and returns a new Animation object. This doesn't load\n * the animation. That will be done at play time or you can call\n * load_animation to do it manually. The animation should be free'd by\n * the user.\n */\nAnimation *create_animation(char *loadFile)\n{\n Animation *anim = (Animation *)malloc(sizeof(Animation));\n KINDLY_DIE_IF_NULL(anim);\n anim->isLoaded = false;\n anim->loadFile = (char *) malloc(sizeof(char) * MAX_FILE_LEAF_NAME);\n KINDLY_DIE_IF_NULL(anim->loadFile);\n strcpy(anim->loadFile, loadFile);\n return anim;\n}\n\nvoid destroy_animation(Animation *anim)\n{\n for(int i=0; i < anim->numFrames; ++i) {\n free(anim->frames[i]);\n }\n free(anim->frames);\n free(anim->loadFile);\n free(anim);\n}\n\n\n/**\n * Loads up the animation. 
Make sure you set the `loadFile` attribute\n * of the Animation before calling this function.\n */\nvoid load_animation(Animation *anim)\n{\n FILE *fp;\n char msg[500], loadFileFullPath[MAX_FILE_FULL_PATH];\n char *line, *thisFrame;\n size_t len=0;\n ssize_t read;\n\n sprintf(loadFileFullPath, \"%s/animations/%s\", xstr(TERMSHIP_PATH), anim->loadFile);\n sprintf(msg, \"Loading animation file from %s...\\n\", loadFileFullPath);\n write_to_log(msg);\n\n fp = fopen(loadFileFullPath, \"r\");\n if (fp == NULL) {\n cleanup_ncurses();\n printf(\"couldn't open %s for reading...\\n\", loadFileFullPath);\n exit(EXIT_FAILURE);\n }\n\n /*** read the header lines: ***/\n /* First is the size (in lines) of each of the frames we're about to\n read */\n line = NULL;\n read = getline(&line, &len, fp);\n sscanf(line, \"%d\\n\", &(anim->height));\n free(line);\n sprintf(msg, \"%s has height %d\\n\", loadFileFullPath,\n anim->height);\n write_to_log(msg);\n /* Next is the total number of frames */\n line = NULL;\n read = getline(&line, &len, fp);\n sscanf(line, \"%d\\n\", &(anim->numFrames));\n free(line);\n sprintf(msg, \"%s has %d total frames\\n\", loadFileFullPath, anim->numFrames);\n write_to_log(msg);\n /* Next is the desired frame rate: */\n line = NULL;\n read = getline(&line, &len, fp);\n sscanf(line, \"%d\\n\", &(anim->fps));\n free(line);\n sprintf(msg, \"%s will run at %d fps\\n\", loadFileFullPath, anim->fps);\n write_to_log(msg);\n\n (void)read; /*compiler warnings*/\n\n /* Allocate space for the animation (the frames, not the actual lines quite yet): */\n anim->frames = (char **) malloc(sizeof(char *) * anim->numFrames);\n KINDLY_DIE_IF_NULL(anim->frames);\n thisFrame = (char *) malloc(sizeof(char) * anim->height * MAX_FRAME_WIDTH);\n KINDLY_DIE_IF_NULL(thisFrame);\n\n int max_width = 0;\n for (int i=0; i < anim->numFrames; ++i) {\n bool last_char_was_newline = false;\n int chars_read;\n for (chars_read=0; ; ++chars_read) {\n int ch = fgetc(fp);\n /* sprintf(msg, \"[%d] 
=> %c\\n\", chars_read, (char)ch); */\n /* write_to_log(msg); */\n thisFrame[chars_read] = ch;\n if (ch == '\\n') {\n /* two newlines in a row. next frame. */\n if (last_char_was_newline)\n break;\n last_char_was_newline = true;\n } else {\n last_char_was_newline = false;\n }\n }\n thisFrame[chars_read-1] = '\\0'; /* overwriting the final newline */\n anim->frames[i] = (char *) malloc((sizeof(char) * chars_read)); /* don't need +1 because we truncated the last newline */\n KINDLY_DIE_IF_NULL(anim->frames[i]);\n strcpy(anim->frames[i], thisFrame);\n\n max_width = MAX(chars_read, max_width);\n\n } /*eo for each line*/\n\n free(thisFrame);\n\n anim->width = max_width;\n\n anim->isLoaded = true;\n}\n\n/**\n * Plays the specified animation. Loads it if necessary.\n */\nvoid play_animation\n(Animation *anim, char *subtitle, bool press_key_to_continue, bool hold_at_end)\n{\n static WINDOW *animation_window = NULL;\n static PANEL *animation_panel = NULL;\n static int anim_width;\n static int anim_height;\n char msg[500];\n (void)msg;\n char *hold_message = \"\\n(Press any key to continue)\";\n\n if (!anim->isLoaded) load_animation(anim);\n\n anim_width = anim->width;\n anim_height = anim->height;\n\n if (subtitle != NULL)\n anim_height++;\n\n\n\n for (int i=0; i < anim->numFrames; ++i) {\n char *theframe = anim->frames[i];\n if (subtitle != NULL) {\n theframe = (char *) malloc(strlen(anim->frames[i]) + strlen(subtitle) + 2); /* +2 for extra null and newline */\n KINDLY_DIE_IF_NULL(theframe);\n strcpy(theframe, anim->frames[i]);\n strcat(theframe, \"\\n\");\n strcat(theframe, subtitle);\n }\n\n show_message_box_win(&animation_window, &animation_panel,\n\t\t\t theframe, &anim_width, &anim_height);\n\n if (subtitle != NULL)\n free(theframe);\n\n if (press_key_to_continue) {\n /* no delay if press_key_to_continue is set: */\n nodelay(animation_window, true);\n if (ERR != wgetch(animation_window)) {\n\tnodelay(animation_window, false);\n\treturn;\n }\n nodelay(animation_window, 
false);\n }\n\n /* we assume the show_message_box_win takes 0 time */\n usleep( (1/(float)anim->fps) * 1000000 );\n }\n\n if (hold_at_end) {\n char *hold_frame = (char *) malloc(strlen(anim->frames[anim->numFrames-1]) + strlen(hold_message) + 1);\n KINDLY_DIE_IF_NULL(hold_frame);\n strcpy(hold_frame, anim->frames[anim->numFrames-1]);\n strcat(hold_frame, hold_message);\n show_message_box_win(&animation_window, &animation_panel,\n hold_frame, &anim_width, &anim_height);\n getch();\n free(hold_frame);\n }\n\n hide_message_box_win(&animation_window, &animation_panel);\n}\n\n\nvoid cleanup_ncurses()\n{\n endwin(); /* end curses mode */\n}\n\nvoid kindly_die(char *msg)\n{\n cleanup_ncurses();\n printf(msg);\n exit(-1);\n}\n"}}
-{"repo": "digitalbazaar/forge", "pr_number": 1077, "title": "In CTR mode, support to encrypt/decrypt from the middle of a message", "state": "open", "merged_at": null, "additions": 5, "deletions": 2, "files_changed": ["lib/cipherModes.js"], "files_before": {"lib/cipherModes.js": "/**\n * Supported cipher modes.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./util');\n\nforge.cipher = forge.cipher || {};\n\n// supported cipher modes\nvar modes = module.exports = forge.cipher.modes = forge.cipher.modes || {};\n\n/** Electronic codebook (ECB) (Don't use this; it's not secure) **/\n\nmodes.ecb = function(options) {\n options = options || {};\n this.name = 'ECB';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = new Array(this._ints);\n this._outBlock = new Array(this._ints);\n};\n\nmodes.ecb.prototype.start = function(options) {};\n\nmodes.ecb.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n if(input.length() < this.blockSize && !(finish && input.length() > 0)) {\n return true;\n }\n\n // get next block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32();\n }\n\n // encrypt block\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // write output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i]);\n }\n};\n\nmodes.ecb.prototype.decrypt = function(input, output, finish) {\n // not enough input to decrypt\n if(input.length() < this.blockSize && !(finish && input.length() > 0)) {\n return true;\n }\n\n // get next block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32();\n }\n\n // decrypt block\n this.cipher.decrypt(this._inBlock, this._outBlock);\n\n // write output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i]);\n }\n};\n\nmodes.ecb.prototype.pad = 
function(input, options) {\n // add PKCS#7 padding to block (each pad byte is the\n // value of the number of pad bytes)\n var padding = (input.length() === this.blockSize ?\n this.blockSize : (this.blockSize - input.length()));\n input.fillWithByte(padding, padding);\n return true;\n};\n\nmodes.ecb.prototype.unpad = function(output, options) {\n // check for error: input data not a multiple of blockSize\n if(options.overflow > 0) {\n return false;\n }\n\n // ensure padding byte count is valid\n var len = output.length();\n var count = output.at(len - 1);\n if(count > (this.blockSize << 2)) {\n return false;\n }\n\n // trim off padding bytes\n output.truncate(count);\n return true;\n};\n\n/** Cipher-block Chaining (CBC) **/\n\nmodes.cbc = function(options) {\n options = options || {};\n this.name = 'CBC';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = new Array(this._ints);\n this._outBlock = new Array(this._ints);\n};\n\nmodes.cbc.prototype.start = function(options) {\n // Note: legacy support for using IV residue (has security flaws)\n // if IV is null, reuse block from previous processing\n if(options.iv === null) {\n // must have a previous block\n if(!this._prev) {\n throw new Error('Invalid IV parameter.');\n }\n this._iv = this._prev.slice(0);\n } else if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n } else {\n // save IV as \"previous\" block\n this._iv = transformIV(options.iv, this.blockSize);\n this._prev = this._iv.slice(0);\n }\n};\n\nmodes.cbc.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n if(input.length() < this.blockSize && !(finish && input.length() > 0)) {\n return true;\n }\n\n // get next block\n // CBC XOR's IV (or previous block) with plaintext\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = this._prev[i] ^ input.getInt32();\n }\n\n // encrypt block\n this.cipher.encrypt(this._inBlock, 
this._outBlock);\n\n // write output, save previous block\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i]);\n }\n this._prev = this._outBlock;\n};\n\nmodes.cbc.prototype.decrypt = function(input, output, finish) {\n // not enough input to decrypt\n if(input.length() < this.blockSize && !(finish && input.length() > 0)) {\n return true;\n }\n\n // get next block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32();\n }\n\n // decrypt block\n this.cipher.decrypt(this._inBlock, this._outBlock);\n\n // write output, save previous ciphered block\n // CBC XOR's IV (or previous block) with ciphertext\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._prev[i] ^ this._outBlock[i]);\n }\n this._prev = this._inBlock.slice(0);\n};\n\nmodes.cbc.prototype.pad = function(input, options) {\n // add PKCS#7 padding to block (each pad byte is the\n // value of the number of pad bytes)\n var padding = (input.length() === this.blockSize ?\n this.blockSize : (this.blockSize - input.length()));\n input.fillWithByte(padding, padding);\n return true;\n};\n\nmodes.cbc.prototype.unpad = function(output, options) {\n // check for error: input data not a multiple of blockSize\n if(options.overflow > 0) {\n return false;\n }\n\n // ensure padding byte count is valid\n var len = output.length();\n var count = output.at(len - 1);\n if(count > (this.blockSize << 2)) {\n return false;\n }\n\n // trim off padding bytes\n output.truncate(count);\n return true;\n};\n\n/** Cipher feedback (CFB) **/\n\nmodes.cfb = function(options) {\n options = options || {};\n this.name = 'CFB';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = null;\n this._outBlock = new Array(this._ints);\n this._partialBlock = new Array(this._ints);\n this._partialOutput = forge.util.createBuffer();\n this._partialBytes = 0;\n};\n\nmodes.cfb.prototype.start = function(options) {\n 
if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n }\n // use IV as first input\n this._iv = transformIV(options.iv, this.blockSize);\n this._inBlock = this._iv.slice(0);\n this._partialBytes = 0;\n};\n\nmodes.cfb.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n var inputLength = input.length();\n if(inputLength === 0) {\n return true;\n }\n\n // encrypt block\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength >= this.blockSize) {\n // XOR input with output, write input as output\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32() ^ this._outBlock[i];\n output.putInt32(this._inBlock[i]);\n }\n return;\n }\n\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - partialBytes;\n }\n\n // XOR input with output, write input as partial output\n this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialBlock[i] = input.getInt32() ^ this._outBlock[i];\n this._partialOutput.putInt32(this._partialBlock[i]);\n }\n\n if(partialBytes > 0) {\n // block still incomplete, restore input buffer\n input.read -= this.blockSize;\n } else {\n // block complete, update input block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = this._partialBlock[i];\n }\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n};\n\nmodes.cfb.prototype.decrypt = function(input, output, finish) {\n // not enough input to decrypt\n var inputLength = 
input.length();\n if(inputLength === 0) {\n return true;\n }\n\n // encrypt block (CFB always uses encryption mode)\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength >= this.blockSize) {\n // XOR input with output, write input as output\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32();\n output.putInt32(this._inBlock[i] ^ this._outBlock[i]);\n }\n return;\n }\n\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - partialBytes;\n }\n\n // XOR input with output, write input as partial output\n this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialBlock[i] = input.getInt32();\n this._partialOutput.putInt32(this._partialBlock[i] ^ this._outBlock[i]);\n }\n\n if(partialBytes > 0) {\n // block still incomplete, restore input buffer\n input.read -= this.blockSize;\n } else {\n // block complete, update input block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = this._partialBlock[i];\n }\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n};\n\n/** Output feedback (OFB) **/\n\nmodes.ofb = function(options) {\n options = options || {};\n this.name = 'OFB';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = null;\n this._outBlock = new Array(this._ints);\n this._partialOutput = forge.util.createBuffer();\n this._partialBytes = 0;\n};\n\nmodes.ofb.prototype.start = function(options) {\n 
if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n }\n // use IV as first input\n this._iv = transformIV(options.iv, this.blockSize);\n this._inBlock = this._iv.slice(0);\n this._partialBytes = 0;\n};\n\nmodes.ofb.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n var inputLength = input.length();\n if(input.length() === 0) {\n return true;\n }\n\n // encrypt block (OFB always uses encryption mode)\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength >= this.blockSize) {\n // XOR input with output and update next input\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(input.getInt32() ^ this._outBlock[i]);\n this._inBlock[i] = this._outBlock[i];\n }\n return;\n }\n\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - partialBytes;\n }\n\n // XOR input with output\n this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]);\n }\n\n if(partialBytes > 0) {\n // block still incomplete, restore input buffer\n input.read -= this.blockSize;\n } else {\n // block complete, update input block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = this._outBlock[i];\n }\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n};\n\nmodes.ofb.prototype.decrypt = modes.ofb.prototype.encrypt;\n\n/** Counter (CTR) **/\n\nmodes.ctr = function(options) {\n options = options || {};\n this.name = 
'CTR';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = null;\n this._outBlock = new Array(this._ints);\n this._partialOutput = forge.util.createBuffer();\n this._partialBytes = 0;\n};\n\nmodes.ctr.prototype.start = function(options) {\n if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n }\n // use IV as first input\n this._iv = transformIV(options.iv, this.blockSize);\n this._inBlock = this._iv.slice(0);\n this._partialBytes = 0;\n};\n\nmodes.ctr.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n var inputLength = input.length();\n if(inputLength === 0) {\n return true;\n }\n\n // encrypt block (CTR always uses encryption mode)\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength >= this.blockSize) {\n // XOR input with output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(input.getInt32() ^ this._outBlock[i]);\n }\n } else {\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - partialBytes;\n }\n\n // XOR input with output\n this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]);\n }\n\n if(partialBytes > 0) {\n // block still incomplete, restore input buffer\n input.read -= this.blockSize;\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n }\n\n // block complete, increment counter (input 
block)\n inc32(this._inBlock);\n};\n\nmodes.ctr.prototype.decrypt = modes.ctr.prototype.encrypt;\n\n/** Galois/Counter Mode (GCM) **/\n\nmodes.gcm = function(options) {\n options = options || {};\n this.name = 'GCM';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = new Array(this._ints);\n this._outBlock = new Array(this._ints);\n this._partialOutput = forge.util.createBuffer();\n this._partialBytes = 0;\n\n // R is actually this value concatenated with 120 more zero bits, but\n // we only XOR against R so the other zeros have no effect -- we just\n // apply this value to the first integer in a block\n this._R = 0xE1000000;\n};\n\nmodes.gcm.prototype.start = function(options) {\n if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n }\n // ensure IV is a byte buffer\n var iv = forge.util.createBuffer(options.iv);\n\n // no ciphered data processed yet\n this._cipherLength = 0;\n\n // default additional data is none\n var additionalData;\n if('additionalData' in options) {\n additionalData = forge.util.createBuffer(options.additionalData);\n } else {\n additionalData = forge.util.createBuffer();\n }\n\n // default tag length is 128 bits\n if('tagLength' in options) {\n this._tagLength = options.tagLength;\n } else {\n this._tagLength = 128;\n }\n\n // if tag is given, ensure tag matches tag length\n this._tag = null;\n if(options.decrypt) {\n // save tag to check later\n this._tag = forge.util.createBuffer(options.tag).getBytes();\n if(this._tag.length !== (this._tagLength / 8)) {\n throw new Error('Authentication tag does not match tag length.');\n }\n }\n\n // create tmp storage for hash calculation\n this._hashBlock = new Array(this._ints);\n\n // no tag generated yet\n this.tag = null;\n\n // generate hash subkey\n // (apply block cipher to \"zero\" block)\n this._hashSubkey = new Array(this._ints);\n this.cipher.encrypt([0, 0, 0, 0], this._hashSubkey);\n\n // 
generate table M\n // use 4-bit tables (32 component decomposition of a 16 byte value)\n // 8-bit tables take more space and are known to have security\n // vulnerabilities (in native implementations)\n this.componentBits = 4;\n this._m = this.generateHashTable(this._hashSubkey, this.componentBits);\n\n // Note: support IV length different from 96 bits? (only supporting\n // 96 bits is recommended by NIST SP-800-38D)\n // generate J_0\n var ivLength = iv.length();\n if(ivLength === 12) {\n // 96-bit IV\n this._j0 = [iv.getInt32(), iv.getInt32(), iv.getInt32(), 1];\n } else {\n // IV is NOT 96-bits\n this._j0 = [0, 0, 0, 0];\n while(iv.length() > 0) {\n this._j0 = this.ghash(\n this._hashSubkey, this._j0,\n [iv.getInt32(), iv.getInt32(), iv.getInt32(), iv.getInt32()]);\n }\n this._j0 = this.ghash(\n this._hashSubkey, this._j0, [0, 0].concat(from64To32(ivLength * 8)));\n }\n\n // generate ICB (initial counter block)\n this._inBlock = this._j0.slice(0);\n inc32(this._inBlock);\n this._partialBytes = 0;\n\n // consume authentication data\n additionalData = forge.util.createBuffer(additionalData);\n // save additional data length as a BE 64-bit number\n this._aDataLength = from64To32(additionalData.length() * 8);\n // pad additional data to 128 bit (16 byte) block size\n var overflow = additionalData.length() % this.blockSize;\n if(overflow) {\n additionalData.fillWithByte(0, this.blockSize - overflow);\n }\n this._s = [0, 0, 0, 0];\n while(additionalData.length() > 0) {\n this._s = this.ghash(this._hashSubkey, this._s, [\n additionalData.getInt32(),\n additionalData.getInt32(),\n additionalData.getInt32(),\n additionalData.getInt32()\n ]);\n }\n};\n\nmodes.gcm.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n var inputLength = input.length();\n if(inputLength === 0) {\n return true;\n }\n\n // encrypt block\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength 
>= this.blockSize) {\n // XOR input with output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i] ^= input.getInt32());\n }\n this._cipherLength += this.blockSize;\n } else {\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - partialBytes;\n }\n\n // XOR input with output\n this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]);\n }\n\n if(partialBytes <= 0 || finish) {\n // handle overflow prior to hashing\n if(finish) {\n // get block overflow\n var overflow = inputLength % this.blockSize;\n this._cipherLength += overflow;\n // truncate for hash function\n this._partialOutput.truncate(this.blockSize - overflow);\n } else {\n this._cipherLength += this.blockSize;\n }\n\n // get output block for hashing\n for(var i = 0; i < this._ints; ++i) {\n this._outBlock[i] = this._partialOutput.getInt32();\n }\n this._partialOutput.read -= this.blockSize;\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n // block still incomplete, restore input buffer, get partial output,\n // and return early\n input.read -= this.blockSize;\n output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n }\n\n // update hash block S\n this._s = this.ghash(this._hashSubkey, this._s, this._outBlock);\n\n // increment counter (input block)\n inc32(this._inBlock);\n};\n\nmodes.gcm.prototype.decrypt = function(input, output, finish) {\n // not enough input to decrypt\n var inputLength = input.length();\n if(inputLength < this.blockSize && !(finish && inputLength > 0)) {\n return 
true;\n }\n\n // encrypt block (GCM always uses encryption mode)\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // increment counter (input block)\n inc32(this._inBlock);\n\n // update hash block S\n this._hashBlock[0] = input.getInt32();\n this._hashBlock[1] = input.getInt32();\n this._hashBlock[2] = input.getInt32();\n this._hashBlock[3] = input.getInt32();\n this._s = this.ghash(this._hashSubkey, this._s, this._hashBlock);\n\n // XOR hash input with output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i] ^ this._hashBlock[i]);\n }\n\n // increment cipher data length\n if(inputLength < this.blockSize) {\n this._cipherLength += inputLength % this.blockSize;\n } else {\n this._cipherLength += this.blockSize;\n }\n};\n\nmodes.gcm.prototype.afterFinish = function(output, options) {\n var rval = true;\n\n // handle overflow\n if(options.decrypt && options.overflow) {\n output.truncate(this.blockSize - options.overflow);\n }\n\n // handle authentication tag\n this.tag = forge.util.createBuffer();\n\n // concatenate additional data length with cipher length\n var lengths = this._aDataLength.concat(from64To32(this._cipherLength * 8));\n\n // include lengths in hash\n this._s = this.ghash(this._hashSubkey, this._s, lengths);\n\n // do GCTR(J_0, S)\n var tag = [];\n this.cipher.encrypt(this._j0, tag);\n for(var i = 0; i < this._ints; ++i) {\n this.tag.putInt32(this._s[i] ^ tag[i]);\n }\n\n // trim tag to length\n this.tag.truncate(this.tag.length() % (this._tagLength / 8));\n\n // check authentication tag\n if(options.decrypt && this.tag.bytes() !== this._tag) {\n rval = false;\n }\n\n return rval;\n};\n\n/**\n * See NIST SP-800-38D 6.3 (Algorithm 1). This function performs Galois\n * field multiplication. The field, GF(2^128), is defined by the polynomial:\n *\n * x^128 + x^7 + x^2 + x + 1\n *\n * Which is represented in little-endian binary form as: 11100001 (0xe1). When\n * the value of a coefficient is 1, a bit is set. 
The value R, is the\n * concatenation of this value and 120 zero bits, yielding a 128-bit value\n * which matches the block size.\n *\n * This function will multiply two elements (vectors of bytes), X and Y, in\n * the field GF(2^128). The result is initialized to zero. For each bit of\n * X (out of 128), x_i, if x_i is set, then the result is multiplied (XOR'd)\n * by the current value of Y. For each bit, the value of Y will be raised by\n * a power of x (multiplied by the polynomial x). This can be achieved by\n * shifting Y once to the right. If the current value of Y, prior to being\n * multiplied by x, has 0 as its LSB, then it is a 127th degree polynomial.\n * Otherwise, we must divide by R after shifting to find the remainder.\n *\n * @param x the first block to multiply by the second.\n * @param y the second block to multiply by the first.\n *\n * @return the block result of the multiplication.\n */\nmodes.gcm.prototype.multiply = function(x, y) {\n var z_i = [0, 0, 0, 0];\n var v_i = y.slice(0);\n\n // calculate Z_128 (block has 128 bits)\n for(var i = 0; i < 128; ++i) {\n // if x_i is 0, Z_{i+1} = Z_i (unchanged)\n // else Z_{i+1} = Z_i ^ V_i\n // get x_i by finding 32-bit int position, then left shift 1 by remainder\n var x_i = x[(i / 32) | 0] & (1 << (31 - i % 32));\n if(x_i) {\n z_i[0] ^= v_i[0];\n z_i[1] ^= v_i[1];\n z_i[2] ^= v_i[2];\n z_i[3] ^= v_i[3];\n }\n\n // if LSB(V_i) is 1, V_i = V_i >> 1\n // else V_i = (V_i >> 1) ^ R\n this.pow(v_i, v_i);\n }\n\n return z_i;\n};\n\nmodes.gcm.prototype.pow = function(x, out) {\n // if LSB(x) is 1, x = x >>> 1\n // else x = (x >>> 1) ^ R\n var lsb = x[3] & 1;\n\n // always do x >>> 1:\n // starting with the rightmost integer, shift each integer to the right\n // one bit, pulling in the bit from the integer to the left as its top\n // most bit (do this for the last 3 integers)\n for(var i = 3; i > 0; --i) {\n out[i] = (x[i] >>> 1) | ((x[i - 1] & 1) << 31);\n }\n // shift the first integer normally\n out[0] = 
x[0] >>> 1;\n\n // if lsb was not set, then polynomial had a degree of 127 and doesn't\n // need to divided; otherwise, XOR with R to find the remainder; we only\n // need to XOR the first integer since R technically ends w/120 zero bits\n if(lsb) {\n out[0] ^= this._R;\n }\n};\n\nmodes.gcm.prototype.tableMultiply = function(x) {\n // assumes 4-bit tables are used\n var z = [0, 0, 0, 0];\n for(var i = 0; i < 32; ++i) {\n var idx = (i / 8) | 0;\n var x_i = (x[idx] >>> ((7 - (i % 8)) * 4)) & 0xF;\n var ah = this._m[i][x_i];\n z[0] ^= ah[0];\n z[1] ^= ah[1];\n z[2] ^= ah[2];\n z[3] ^= ah[3];\n }\n return z;\n};\n\n/**\n * A continuing version of the GHASH algorithm that operates on a single\n * block. The hash block, last hash value (Ym) and the new block to hash\n * are given.\n *\n * @param h the hash block.\n * @param y the previous value for Ym, use [0, 0, 0, 0] for a new hash.\n * @param x the block to hash.\n *\n * @return the hashed value (Ym).\n */\nmodes.gcm.prototype.ghash = function(h, y, x) {\n y[0] ^= x[0];\n y[1] ^= x[1];\n y[2] ^= x[2];\n y[3] ^= x[3];\n return this.tableMultiply(y);\n //return this.multiply(y, h);\n};\n\n/**\n * Precomputes a table for multiplying against the hash subkey. This\n * mechanism provides a substantial speed increase over multiplication\n * performed without a table. 
The table-based multiplication this table is\n * for solves X * H by multiplying each component of X by H and then\n * composing the results together using XOR.\n *\n * This function can be used to generate tables with different bit sizes\n * for the components, however, this implementation assumes there are\n * 32 components of X (which is a 16 byte vector), therefore each component\n * takes 4-bits (so the table is constructed with bits=4).\n *\n * @param h the hash subkey.\n * @param bits the bit size for a component.\n */\nmodes.gcm.prototype.generateHashTable = function(h, bits) {\n // TODO: There are further optimizations that would use only the\n // first table M_0 (or some variant) along with a remainder table;\n // this can be explored in the future\n var multiplier = 8 / bits;\n var perInt = 4 * multiplier;\n var size = 16 * multiplier;\n var m = new Array(size);\n for(var i = 0; i < size; ++i) {\n var tmp = [0, 0, 0, 0];\n var idx = (i / perInt) | 0;\n var shft = ((perInt - 1 - (i % perInt)) * bits);\n tmp[idx] = (1 << (bits - 1)) << shft;\n m[i] = this.generateSubHashTable(this.multiply(tmp, h), bits);\n }\n return m;\n};\n\n/**\n * Generates a table for multiplying against the hash subkey for one\n * particular component (out of all possible component values).\n *\n * @param mid the pre-multiplied value for the middle key of the table.\n * @param bits the bit size for a component.\n */\nmodes.gcm.prototype.generateSubHashTable = function(mid, bits) {\n // compute the table quickly by minimizing the number of\n // POW operations -- they only need to be performed for powers of 2,\n // all other entries can be composed from those powers using XOR\n var size = 1 << bits;\n var half = size >>> 1;\n var m = new Array(size);\n m[half] = mid.slice(0);\n var i = half >>> 1;\n while(i > 0) {\n // raise m0[2 * i] and store in m0[i]\n this.pow(m[2 * i], m[i] = []);\n i >>= 1;\n }\n i = 2;\n while(i < half) {\n for(var j = 1; j < i; ++j) {\n var m_i = m[i];\n var 
m_j = m[j];\n m[i + j] = [\n m_i[0] ^ m_j[0],\n m_i[1] ^ m_j[1],\n m_i[2] ^ m_j[2],\n m_i[3] ^ m_j[3]\n ];\n }\n i *= 2;\n }\n m[0] = [0, 0, 0, 0];\n /* Note: We could avoid storing these by doing composition during multiply\n calculate top half using composition by speed is preferred. */\n for(i = half + 1; i < size; ++i) {\n var c = m[i ^ half];\n m[i] = [mid[0] ^ c[0], mid[1] ^ c[1], mid[2] ^ c[2], mid[3] ^ c[3]];\n }\n return m;\n};\n\n/** Utility functions */\n\nfunction transformIV(iv, blockSize) {\n if(typeof iv === 'string') {\n // convert iv string into byte buffer\n iv = forge.util.createBuffer(iv);\n }\n\n if(forge.util.isArray(iv) && iv.length > 4) {\n // convert iv byte array into byte buffer\n var tmp = iv;\n iv = forge.util.createBuffer();\n for(var i = 0; i < tmp.length; ++i) {\n iv.putByte(tmp[i]);\n }\n }\n\n if(iv.length() < blockSize) {\n throw new Error(\n 'Invalid IV length; got ' + iv.length() +\n ' bytes and expected ' + blockSize + ' bytes.');\n }\n\n if(!forge.util.isArray(iv)) {\n // convert iv byte buffer into 32-bit integer array\n var ints = [];\n var blocks = blockSize / 4;\n for(var i = 0; i < blocks; ++i) {\n ints.push(iv.getInt32());\n }\n iv = ints;\n }\n\n return iv;\n}\n\nfunction inc32(block) {\n // increment last 32 bits of block only\n block[block.length - 1] = (block[block.length - 1] + 1) & 0xFFFFFFFF;\n}\n\nfunction from64To32(num) {\n // convert 64-bit number to two BE Int32s\n return [(num / 0x100000000) | 0, num & 0xFFFFFFFF];\n}\n"}, "files_after": {"lib/cipherModes.js": "/**\n * Supported cipher modes.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n */\nvar forge = require('./forge');\nrequire('./util');\n\nforge.cipher = forge.cipher || {};\n\n// supported cipher modes\nvar modes = module.exports = forge.cipher.modes = forge.cipher.modes || {};\n\n/** Electronic codebook (ECB) (Don't use this; it's not secure) **/\n\nmodes.ecb = function(options) {\n options = options || {};\n 
this.name = 'ECB';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = new Array(this._ints);\n this._outBlock = new Array(this._ints);\n};\n\nmodes.ecb.prototype.start = function(options) {};\n\nmodes.ecb.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n if(input.length() < this.blockSize && !(finish && input.length() > 0)) {\n return true;\n }\n\n // get next block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32();\n }\n\n // encrypt block\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // write output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i]);\n }\n};\n\nmodes.ecb.prototype.decrypt = function(input, output, finish) {\n // not enough input to decrypt\n if(input.length() < this.blockSize && !(finish && input.length() > 0)) {\n return true;\n }\n\n // get next block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32();\n }\n\n // decrypt block\n this.cipher.decrypt(this._inBlock, this._outBlock);\n\n // write output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i]);\n }\n};\n\nmodes.ecb.prototype.pad = function(input, options) {\n // add PKCS#7 padding to block (each pad byte is the\n // value of the number of pad bytes)\n var padding = (input.length() === this.blockSize ?\n this.blockSize : (this.blockSize - input.length()));\n input.fillWithByte(padding, padding);\n return true;\n};\n\nmodes.ecb.prototype.unpad = function(output, options) {\n // check for error: input data not a multiple of blockSize\n if(options.overflow > 0) {\n return false;\n }\n\n // ensure padding byte count is valid\n var len = output.length();\n var count = output.at(len - 1);\n if(count > (this.blockSize << 2)) {\n return false;\n }\n\n // trim off padding bytes\n output.truncate(count);\n return true;\n};\n\n/** Cipher-block Chaining (CBC) 
**/\n\nmodes.cbc = function(options) {\n options = options || {};\n this.name = 'CBC';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = new Array(this._ints);\n this._outBlock = new Array(this._ints);\n};\n\nmodes.cbc.prototype.start = function(options) {\n // Note: legacy support for using IV residue (has security flaws)\n // if IV is null, reuse block from previous processing\n if(options.iv === null) {\n // must have a previous block\n if(!this._prev) {\n throw new Error('Invalid IV parameter.');\n }\n this._iv = this._prev.slice(0);\n } else if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n } else {\n // save IV as \"previous\" block\n this._iv = transformIV(options.iv, this.blockSize);\n this._prev = this._iv.slice(0);\n }\n};\n\nmodes.cbc.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n if(input.length() < this.blockSize && !(finish && input.length() > 0)) {\n return true;\n }\n\n // get next block\n // CBC XOR's IV (or previous block) with plaintext\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = this._prev[i] ^ input.getInt32();\n }\n\n // encrypt block\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // write output, save previous block\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i]);\n }\n this._prev = this._outBlock;\n};\n\nmodes.cbc.prototype.decrypt = function(input, output, finish) {\n // not enough input to decrypt\n if(input.length() < this.blockSize && !(finish && input.length() > 0)) {\n return true;\n }\n\n // get next block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32();\n }\n\n // decrypt block\n this.cipher.decrypt(this._inBlock, this._outBlock);\n\n // write output, save previous ciphered block\n // CBC XOR's IV (or previous block) with ciphertext\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._prev[i] ^ 
this._outBlock[i]);\n }\n this._prev = this._inBlock.slice(0);\n};\n\nmodes.cbc.prototype.pad = function(input, options) {\n // add PKCS#7 padding to block (each pad byte is the\n // value of the number of pad bytes)\n var padding = (input.length() === this.blockSize ?\n this.blockSize : (this.blockSize - input.length()));\n input.fillWithByte(padding, padding);\n return true;\n};\n\nmodes.cbc.prototype.unpad = function(output, options) {\n // check for error: input data not a multiple of blockSize\n if(options.overflow > 0) {\n return false;\n }\n\n // ensure padding byte count is valid\n var len = output.length();\n var count = output.at(len - 1);\n if(count > (this.blockSize << 2)) {\n return false;\n }\n\n // trim off padding bytes\n output.truncate(count);\n return true;\n};\n\n/** Cipher feedback (CFB) **/\n\nmodes.cfb = function(options) {\n options = options || {};\n this.name = 'CFB';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = null;\n this._outBlock = new Array(this._ints);\n this._partialBlock = new Array(this._ints);\n this._partialOutput = forge.util.createBuffer();\n this._partialBytes = 0;\n};\n\nmodes.cfb.prototype.start = function(options) {\n if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n }\n // use IV as first input\n this._iv = transformIV(options.iv, this.blockSize);\n this._inBlock = this._iv.slice(0);\n this._partialBytes = 0;\n};\n\nmodes.cfb.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n var inputLength = input.length();\n if(inputLength === 0) {\n return true;\n }\n\n // encrypt block\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength >= this.blockSize) {\n // XOR input with output, write input as output\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32() ^ this._outBlock[i];\n 
output.putInt32(this._inBlock[i]);\n }\n return;\n }\n\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - partialBytes;\n }\n\n // XOR input with output, write input as partial output\n this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialBlock[i] = input.getInt32() ^ this._outBlock[i];\n this._partialOutput.putInt32(this._partialBlock[i]);\n }\n\n if(partialBytes > 0) {\n // block still incomplete, restore input buffer\n input.read -= this.blockSize;\n } else {\n // block complete, update input block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = this._partialBlock[i];\n }\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n};\n\nmodes.cfb.prototype.decrypt = function(input, output, finish) {\n // not enough input to decrypt\n var inputLength = input.length();\n if(inputLength === 0) {\n return true;\n }\n\n // encrypt block (CFB always uses encryption mode)\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength >= this.blockSize) {\n // XOR input with output, write input as output\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = input.getInt32();\n output.putInt32(this._inBlock[i] ^ this._outBlock[i]);\n }\n return;\n }\n\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - partialBytes;\n }\n\n // XOR input with output, write input as partial output\n 
this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialBlock[i] = input.getInt32();\n this._partialOutput.putInt32(this._partialBlock[i] ^ this._outBlock[i]);\n }\n\n if(partialBytes > 0) {\n // block still incomplete, restore input buffer\n input.read -= this.blockSize;\n } else {\n // block complete, update input block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = this._partialBlock[i];\n }\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n};\n\n/** Output feedback (OFB) **/\n\nmodes.ofb = function(options) {\n options = options || {};\n this.name = 'OFB';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = null;\n this._outBlock = new Array(this._ints);\n this._partialOutput = forge.util.createBuffer();\n this._partialBytes = 0;\n};\n\nmodes.ofb.prototype.start = function(options) {\n if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n }\n // use IV as first input\n this._iv = transformIV(options.iv, this.blockSize);\n this._inBlock = this._iv.slice(0);\n this._partialBytes = 0;\n};\n\nmodes.ofb.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n var inputLength = input.length();\n if(input.length() === 0) {\n return true;\n }\n\n // encrypt block (OFB always uses encryption mode)\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength >= this.blockSize) {\n // XOR input with output and update next input\n for(var i = 0; i < this._ints; ++i) {\n 
output.putInt32(input.getInt32() ^ this._outBlock[i]);\n this._inBlock[i] = this._outBlock[i];\n }\n return;\n }\n\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - partialBytes;\n }\n\n // XOR input with output\n this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]);\n }\n\n if(partialBytes > 0) {\n // block still incomplete, restore input buffer\n input.read -= this.blockSize;\n } else {\n // block complete, update input block\n for(var i = 0; i < this._ints; ++i) {\n this._inBlock[i] = this._outBlock[i];\n }\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n};\n\nmodes.ofb.prototype.decrypt = modes.ofb.prototype.encrypt;\n\n/** Counter (CTR) **/\n\nmodes.ctr = function(options) {\n options = options || {};\n this.name = 'CTR';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = null;\n this._outBlock = new Array(this._ints);\n this._partialOutput = forge.util.createBuffer();\n this._partialBytes = 0;\n};\n\nmodes.ctr.prototype.start = function(options) {\n if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n }\n // use IV as first input\n this._iv = transformIV(options.iv, this.blockSize);\n this._inBlock = this._iv.slice(0);\n if(options.counter) {\n inc32(this._inBlock, options.counter);\n }\n this._partialBytes = 0;\n};\n\nmodes.ctr.prototype.encrypt = function(input, output, finish) {\n // not enough input to 
encrypt\n var inputLength = input.length();\n if(inputLength === 0) {\n return true;\n }\n\n // encrypt block (CTR always uses encryption mode)\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength >= this.blockSize) {\n // XOR input with output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(input.getInt32() ^ this._outBlock[i]);\n }\n } else {\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - partialBytes;\n }\n\n // XOR input with output\n this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]);\n }\n\n if(partialBytes > 0) {\n // block still incomplete, restore input buffer\n input.read -= this.blockSize;\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n }\n\n // block complete, increment counter (input block)\n inc32(this._inBlock);\n};\n\nmodes.ctr.prototype.decrypt = modes.ctr.prototype.encrypt;\n\n/** Galois/Counter Mode (GCM) **/\n\nmodes.gcm = function(options) {\n options = options || {};\n this.name = 'GCM';\n this.cipher = options.cipher;\n this.blockSize = options.blockSize || 16;\n this._ints = this.blockSize / 4;\n this._inBlock = new Array(this._ints);\n this._outBlock = new Array(this._ints);\n this._partialOutput = forge.util.createBuffer();\n this._partialBytes = 0;\n\n // R is actually this value concatenated with 120 more zero bits, but\n // we only XOR against R so the other zeros have no effect -- we just\n // 
apply this value to the first integer in a block\n this._R = 0xE1000000;\n};\n\nmodes.gcm.prototype.start = function(options) {\n if(!('iv' in options)) {\n throw new Error('Invalid IV parameter.');\n }\n // ensure IV is a byte buffer\n var iv = forge.util.createBuffer(options.iv);\n\n // no ciphered data processed yet\n this._cipherLength = 0;\n\n // default additional data is none\n var additionalData;\n if('additionalData' in options) {\n additionalData = forge.util.createBuffer(options.additionalData);\n } else {\n additionalData = forge.util.createBuffer();\n }\n\n // default tag length is 128 bits\n if('tagLength' in options) {\n this._tagLength = options.tagLength;\n } else {\n this._tagLength = 128;\n }\n\n // if tag is given, ensure tag matches tag length\n this._tag = null;\n if(options.decrypt) {\n // save tag to check later\n this._tag = forge.util.createBuffer(options.tag).getBytes();\n if(this._tag.length !== (this._tagLength / 8)) {\n throw new Error('Authentication tag does not match tag length.');\n }\n }\n\n // create tmp storage for hash calculation\n this._hashBlock = new Array(this._ints);\n\n // no tag generated yet\n this.tag = null;\n\n // generate hash subkey\n // (apply block cipher to \"zero\" block)\n this._hashSubkey = new Array(this._ints);\n this.cipher.encrypt([0, 0, 0, 0], this._hashSubkey);\n\n // generate table M\n // use 4-bit tables (32 component decomposition of a 16 byte value)\n // 8-bit tables take more space and are known to have security\n // vulnerabilities (in native implementations)\n this.componentBits = 4;\n this._m = this.generateHashTable(this._hashSubkey, this.componentBits);\n\n // Note: support IV length different from 96 bits? 
(only supporting\n // 96 bits is recommended by NIST SP-800-38D)\n // generate J_0\n var ivLength = iv.length();\n if(ivLength === 12) {\n // 96-bit IV\n this._j0 = [iv.getInt32(), iv.getInt32(), iv.getInt32(), 1];\n } else {\n // IV is NOT 96-bits\n this._j0 = [0, 0, 0, 0];\n while(iv.length() > 0) {\n this._j0 = this.ghash(\n this._hashSubkey, this._j0,\n [iv.getInt32(), iv.getInt32(), iv.getInt32(), iv.getInt32()]);\n }\n this._j0 = this.ghash(\n this._hashSubkey, this._j0, [0, 0].concat(from64To32(ivLength * 8)));\n }\n\n // generate ICB (initial counter block)\n this._inBlock = this._j0.slice(0);\n inc32(this._inBlock);\n this._partialBytes = 0;\n\n // consume authentication data\n additionalData = forge.util.createBuffer(additionalData);\n // save additional data length as a BE 64-bit number\n this._aDataLength = from64To32(additionalData.length() * 8);\n // pad additional data to 128 bit (16 byte) block size\n var overflow = additionalData.length() % this.blockSize;\n if(overflow) {\n additionalData.fillWithByte(0, this.blockSize - overflow);\n }\n this._s = [0, 0, 0, 0];\n while(additionalData.length() > 0) {\n this._s = this.ghash(this._hashSubkey, this._s, [\n additionalData.getInt32(),\n additionalData.getInt32(),\n additionalData.getInt32(),\n additionalData.getInt32()\n ]);\n }\n};\n\nmodes.gcm.prototype.encrypt = function(input, output, finish) {\n // not enough input to encrypt\n var inputLength = input.length();\n if(inputLength === 0) {\n return true;\n }\n\n // encrypt block\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // handle full block\n if(this._partialBytes === 0 && inputLength >= this.blockSize) {\n // XOR input with output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i] ^= input.getInt32());\n }\n this._cipherLength += this.blockSize;\n } else {\n // handle partial block\n var partialBytes = (this.blockSize - inputLength) % this.blockSize;\n if(partialBytes > 0) {\n partialBytes = this.blockSize - 
partialBytes;\n }\n\n // XOR input with output\n this._partialOutput.clear();\n for(var i = 0; i < this._ints; ++i) {\n this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]);\n }\n\n if(partialBytes <= 0 || finish) {\n // handle overflow prior to hashing\n if(finish) {\n // get block overflow\n var overflow = inputLength % this.blockSize;\n this._cipherLength += overflow;\n // truncate for hash function\n this._partialOutput.truncate(this.blockSize - overflow);\n } else {\n this._cipherLength += this.blockSize;\n }\n\n // get output block for hashing\n for(var i = 0; i < this._ints; ++i) {\n this._outBlock[i] = this._partialOutput.getInt32();\n }\n this._partialOutput.read -= this.blockSize;\n }\n\n // skip any previous partial bytes\n if(this._partialBytes > 0) {\n this._partialOutput.getBytes(this._partialBytes);\n }\n\n if(partialBytes > 0 && !finish) {\n // block still incomplete, restore input buffer, get partial output,\n // and return early\n input.read -= this.blockSize;\n output.putBytes(this._partialOutput.getBytes(\n partialBytes - this._partialBytes));\n this._partialBytes = partialBytes;\n return true;\n }\n\n output.putBytes(this._partialOutput.getBytes(\n inputLength - this._partialBytes));\n this._partialBytes = 0;\n }\n\n // update hash block S\n this._s = this.ghash(this._hashSubkey, this._s, this._outBlock);\n\n // increment counter (input block)\n inc32(this._inBlock);\n};\n\nmodes.gcm.prototype.decrypt = function(input, output, finish) {\n // not enough input to decrypt\n var inputLength = input.length();\n if(inputLength < this.blockSize && !(finish && inputLength > 0)) {\n return true;\n }\n\n // encrypt block (GCM always uses encryption mode)\n this.cipher.encrypt(this._inBlock, this._outBlock);\n\n // increment counter (input block)\n inc32(this._inBlock);\n\n // update hash block S\n this._hashBlock[0] = input.getInt32();\n this._hashBlock[1] = input.getInt32();\n this._hashBlock[2] = input.getInt32();\n this._hashBlock[3] = 
input.getInt32();\n this._s = this.ghash(this._hashSubkey, this._s, this._hashBlock);\n\n // XOR hash input with output\n for(var i = 0; i < this._ints; ++i) {\n output.putInt32(this._outBlock[i] ^ this._hashBlock[i]);\n }\n\n // increment cipher data length\n if(inputLength < this.blockSize) {\n this._cipherLength += inputLength % this.blockSize;\n } else {\n this._cipherLength += this.blockSize;\n }\n};\n\nmodes.gcm.prototype.afterFinish = function(output, options) {\n var rval = true;\n\n // handle overflow\n if(options.decrypt && options.overflow) {\n output.truncate(this.blockSize - options.overflow);\n }\n\n // handle authentication tag\n this.tag = forge.util.createBuffer();\n\n // concatenate additional data length with cipher length\n var lengths = this._aDataLength.concat(from64To32(this._cipherLength * 8));\n\n // include lengths in hash\n this._s = this.ghash(this._hashSubkey, this._s, lengths);\n\n // do GCTR(J_0, S)\n var tag = [];\n this.cipher.encrypt(this._j0, tag);\n for(var i = 0; i < this._ints; ++i) {\n this.tag.putInt32(this._s[i] ^ tag[i]);\n }\n\n // trim tag to length\n this.tag.truncate(this.tag.length() % (this._tagLength / 8));\n\n // check authentication tag\n if(options.decrypt && this.tag.bytes() !== this._tag) {\n rval = false;\n }\n\n return rval;\n};\n\n/**\n * See NIST SP-800-38D 6.3 (Algorithm 1). This function performs Galois\n * field multiplication. The field, GF(2^128), is defined by the polynomial:\n *\n * x^128 + x^7 + x^2 + x + 1\n *\n * Which is represented in little-endian binary form as: 11100001 (0xe1). When\n * the value of a coefficient is 1, a bit is set. The value R, is the\n * concatenation of this value and 120 zero bits, yielding a 128-bit value\n * which matches the block size.\n *\n * This function will multiply two elements (vectors of bytes), X and Y, in\n * the field GF(2^128). The result is initialized to zero. 
For each bit of\n * X (out of 128), x_i, if x_i is set, then the result is multiplied (XOR'd)\n * by the current value of Y. For each bit, the value of Y will be raised by\n * a power of x (multiplied by the polynomial x). This can be achieved by\n * shifting Y once to the right. If the current value of Y, prior to being\n * multiplied by x, has 0 as its LSB, then it is a 127th degree polynomial.\n * Otherwise, we must divide by R after shifting to find the remainder.\n *\n * @param x the first block to multiply by the second.\n * @param y the second block to multiply by the first.\n *\n * @return the block result of the multiplication.\n */\nmodes.gcm.prototype.multiply = function(x, y) {\n var z_i = [0, 0, 0, 0];\n var v_i = y.slice(0);\n\n // calculate Z_128 (block has 128 bits)\n for(var i = 0; i < 128; ++i) {\n // if x_i is 0, Z_{i+1} = Z_i (unchanged)\n // else Z_{i+1} = Z_i ^ V_i\n // get x_i by finding 32-bit int position, then left shift 1 by remainder\n var x_i = x[(i / 32) | 0] & (1 << (31 - i % 32));\n if(x_i) {\n z_i[0] ^= v_i[0];\n z_i[1] ^= v_i[1];\n z_i[2] ^= v_i[2];\n z_i[3] ^= v_i[3];\n }\n\n // if LSB(V_i) is 1, V_i = V_i >> 1\n // else V_i = (V_i >> 1) ^ R\n this.pow(v_i, v_i);\n }\n\n return z_i;\n};\n\nmodes.gcm.prototype.pow = function(x, out) {\n // if LSB(x) is 1, x = x >>> 1\n // else x = (x >>> 1) ^ R\n var lsb = x[3] & 1;\n\n // always do x >>> 1:\n // starting with the rightmost integer, shift each integer to the right\n // one bit, pulling in the bit from the integer to the left as its top\n // most bit (do this for the last 3 integers)\n for(var i = 3; i > 0; --i) {\n out[i] = (x[i] >>> 1) | ((x[i - 1] & 1) << 31);\n }\n // shift the first integer normally\n out[0] = x[0] >>> 1;\n\n // if lsb was not set, then polynomial had a degree of 127 and doesn't\n // need to divided; otherwise, XOR with R to find the remainder; we only\n // need to XOR the first integer since R technically ends w/120 zero bits\n if(lsb) {\n out[0] ^= this._R;\n 
}\n};\n\nmodes.gcm.prototype.tableMultiply = function(x) {\n // assumes 4-bit tables are used\n var z = [0, 0, 0, 0];\n for(var i = 0; i < 32; ++i) {\n var idx = (i / 8) | 0;\n var x_i = (x[idx] >>> ((7 - (i % 8)) * 4)) & 0xF;\n var ah = this._m[i][x_i];\n z[0] ^= ah[0];\n z[1] ^= ah[1];\n z[2] ^= ah[2];\n z[3] ^= ah[3];\n }\n return z;\n};\n\n/**\n * A continuing version of the GHASH algorithm that operates on a single\n * block. The hash block, last hash value (Ym) and the new block to hash\n * are given.\n *\n * @param h the hash block.\n * @param y the previous value for Ym, use [0, 0, 0, 0] for a new hash.\n * @param x the block to hash.\n *\n * @return the hashed value (Ym).\n */\nmodes.gcm.prototype.ghash = function(h, y, x) {\n y[0] ^= x[0];\n y[1] ^= x[1];\n y[2] ^= x[2];\n y[3] ^= x[3];\n return this.tableMultiply(y);\n //return this.multiply(y, h);\n};\n\n/**\n * Precomputes a table for multiplying against the hash subkey. This\n * mechanism provides a substantial speed increase over multiplication\n * performed without a table. 
The table-based multiplication this table is\n * for solves X * H by multiplying each component of X by H and then\n * composing the results together using XOR.\n *\n * This function can be used to generate tables with different bit sizes\n * for the components, however, this implementation assumes there are\n * 32 components of X (which is a 16 byte vector), therefore each component\n * takes 4-bits (so the table is constructed with bits=4).\n *\n * @param h the hash subkey.\n * @param bits the bit size for a component.\n */\nmodes.gcm.prototype.generateHashTable = function(h, bits) {\n // TODO: There are further optimizations that would use only the\n // first table M_0 (or some variant) along with a remainder table;\n // this can be explored in the future\n var multiplier = 8 / bits;\n var perInt = 4 * multiplier;\n var size = 16 * multiplier;\n var m = new Array(size);\n for(var i = 0; i < size; ++i) {\n var tmp = [0, 0, 0, 0];\n var idx = (i / perInt) | 0;\n var shft = ((perInt - 1 - (i % perInt)) * bits);\n tmp[idx] = (1 << (bits - 1)) << shft;\n m[i] = this.generateSubHashTable(this.multiply(tmp, h), bits);\n }\n return m;\n};\n\n/**\n * Generates a table for multiplying against the hash subkey for one\n * particular component (out of all possible component values).\n *\n * @param mid the pre-multiplied value for the middle key of the table.\n * @param bits the bit size for a component.\n */\nmodes.gcm.prototype.generateSubHashTable = function(mid, bits) {\n // compute the table quickly by minimizing the number of\n // POW operations -- they only need to be performed for powers of 2,\n // all other entries can be composed from those powers using XOR\n var size = 1 << bits;\n var half = size >>> 1;\n var m = new Array(size);\n m[half] = mid.slice(0);\n var i = half >>> 1;\n while(i > 0) {\n // raise m0[2 * i] and store in m0[i]\n this.pow(m[2 * i], m[i] = []);\n i >>= 1;\n }\n i = 2;\n while(i < half) {\n for(var j = 1; j < i; ++j) {\n var m_i = m[i];\n var 
m_j = m[j];\n m[i + j] = [\n m_i[0] ^ m_j[0],\n m_i[1] ^ m_j[1],\n m_i[2] ^ m_j[2],\n m_i[3] ^ m_j[3]\n ];\n }\n i *= 2;\n }\n m[0] = [0, 0, 0, 0];\n /* Note: We could avoid storing these by doing composition during multiply\n calculate top half using composition by speed is preferred. */\n for(i = half + 1; i < size; ++i) {\n var c = m[i ^ half];\n m[i] = [mid[0] ^ c[0], mid[1] ^ c[1], mid[2] ^ c[2], mid[3] ^ c[3]];\n }\n return m;\n};\n\n/** Utility functions */\n\nfunction transformIV(iv, blockSize) {\n if(typeof iv === 'string') {\n // convert iv string into byte buffer\n iv = forge.util.createBuffer(iv);\n }\n\n if(forge.util.isArray(iv) && iv.length > 4) {\n // convert iv byte array into byte buffer\n var tmp = iv;\n iv = forge.util.createBuffer();\n for(var i = 0; i < tmp.length; ++i) {\n iv.putByte(tmp[i]);\n }\n }\n\n if(iv.length() < blockSize) {\n throw new Error(\n 'Invalid IV length; got ' + iv.length() +\n ' bytes and expected ' + blockSize + ' bytes.');\n }\n\n if(!forge.util.isArray(iv)) {\n // convert iv byte buffer into 32-bit integer array\n var ints = [];\n var blocks = blockSize / 4;\n for(var i = 0; i < blocks; ++i) {\n ints.push(iv.getInt32());\n }\n iv = ints;\n }\n\n return iv;\n}\n\nfunction inc32(block, count) {\n // increment last 32 bits of block only\n block[block.length - 1] = (block[block.length - 1] + (count || 1)) & 0xFFFFFFFF;\n}\n\nfunction from64To32(num) {\n // convert 64-bit number to two BE Int32s\n return [(num / 0x100000000) | 0, num & 0xFFFFFFFF];\n}\n"}}
-{"repo": "FrozenCanuck/Ki", "pr_number": 25, "title": "Event handlers in states with concurrent substates get triggered multiple times", "state": "closed", "merged_at": null, "additions": 25, "deletions": 4, "files_changed": ["frameworks/foundation/system/statechart.js", "frameworks/foundation/tests/event_handling/basic/with_concurrent_states.js"], "files_before": {"frameworks/foundation/system/statechart.js": "// ==========================================================================\n// Project: Ki - A Statechart Framework for SproutCore\n// License: Licensed under MIT license (see license.js)\n// ==========================================================================\n\n/*globals Ki */\n\nsc_require('system/state');\n\n/**\n The startchart manager mixin allows an object to be a statechart. By becoming a statechart, the\n object can then be manage a set of its own states.\n \n This implemention of the statechart manager closely follows the concepts stated in D. Harel's \n original paper \"Statecharts: A Visual Formalism For Complex Systems\" \n (www.wisdom.weizmann.ac.il/~harel/papers/Statecharts.pdf). \n \n The statechart allows for complex state heircharies by nesting states within states, and \n allows for state orthogonality based on the use of concurrent states.\n \n At minimum, a statechart must have one state: The root state. All other states in the statechart\n are a decendents (substates) of the root state.\n \n The following example shows how states are nested within a statechart:\n \n {{{\n \n MyApp.Statechart = SC.Object.extend(Ki.StatechartManager, {\n \n rootState: Ki.State.design({\n \n initialSubstate: 'stateA',\n \n stateA: Ki.State.design({\n // ... can continue to nest further states\n }),\n \n stateB: Ki.State.design({\n // ... can continue to nest further states\n })\n })\n \n })\n \n }}}\n \n Note how in the example above, the root state as an explicit initial substate to enter into. 
If no\n initial substate is provided, then the statechart will default to the the state's first substate.\n \n You can also defined states without explicitly defining the root state. To do so, simply create properties\n on your object that represents states. Upon initialization, a root state will be constructed automatically\n by the mixin and make the states on the object substates of the root state. As an example:\n \n {{{\n \n MyApp.Statechart = SC.Object.extend(Ki.StatechartManager, {\n \n initialState: 'stateA',\n \n stateA: Ki.State.design({\n // ... can continue to nest further states\n }),\n \n stateB: Ki.State.design({\n // ... can continue to nest further states\n })\n \n })\n \n }}} \n \n If you liked to specify a class that should be used as the root state but using the above method to defined\n states, you can set the rootStateExample property with a class that extends from Ki.State. If the \n rootStateExaple property is not explicitly assigned the then default class used will be Ki.State.\n \n To provide your statechart with orthogonality, you use concurrent states. If you use concurrent states,\n then your statechart will have multiple current states. That is because each concurrent state represents an\n independent state structure from other concurrent states. The following example shows how to provide your\n statechart with concurrent states:\n \n {{{\n \n MyApp.Statechart = SC.Object.extend(Ki.StatechartManager, {\n \n rootState: Ki.State.design({\n \n substatesAreConcurrent: YES,\n \n stateA: Ki.State.design({\n // ... can continue to nest further states\n }),\n \n stateB: Ki.State.design({\n // ... can continue to nest further states\n })\n })\n \n })\n \n }}}\n \n Above, to indicate that a state's substates are concurrent, you just have to set the substatesAreConcurrent to \n YES. Once done, then stateA and stateB will be independent of each other and each will manage their\n own current substates. 
The root state will then have more then one current substate.\n \n To define concurrent states directly on the object without explicitly defining a root, you can do the \n following:\n \n {{{\n\n MyApp.Statechart = SC.Object.extend(Ki.StatechartManager, {\n \n statesAreConcurrent: YES,\n \n stateA: Ki.State.design({\n // ... can continue to nest further states\n }),\n \n stateB: Ki.State.design({\n // ... can continue to nest further states\n })\n \n })\n\n }}}\n \n Remember that a startchart can have a mixture of nested and concurrent states in order for you to \n create as complex of statecharts that suite your needs. Here is an example of a mixed state structure:\n \n {{{\n \n MyApp.Statechart = SC.Object.extend(Ki.StatechartManager, {\n \n rootState: Ki.State.design({\n \n initialSubstate: 'stateA',\n \n stateA: Ki.State.design({\n \n substatesAreConcurrent: YES,\n \n stateM: Ki.State.design({ ... })\n stateN: Ki.State.design({ ... })\n stateO: Ki.State.design({ ... })\n \n }),\n \n stateB: Ki.State.design({\n \n initialSubstate: 'stateX',\n \n stateX: Ki.State.design({ ... })\n stateY: Ki.State.desgin({ ... })\n \n })\n })\n \n })\n \n }}}\n \n Depending on your needs, a statechart can have lots of states, which can become hard to manage all within\n one file. To modularize your states and make them easier to manage and maintain, you can plug-in states\n into other states. Let's say we are using the statechart in the last example above, and all the code is \n within one file. We could update the code and split the logic across two or more files like so:\n \n {{{\n ---- state_a.js\n \n MyApp.StateA = Ki.State.extend({\n \n substatesAreConcurrent: YES,\n \n stateM: Ki.State.design({ ... })\n stateN: Ki.State.design({ ... })\n stateO: Ki.State.design({ ... })\n \n });\n \n ---- state_b.js\n \n MyApp.StateB = Ki.State.extend({\n \n substatesAreConcurrent: YES,\n \n stateM: Ki.State.design({ ... })\n stateN: Ki.State.design({ ... })\n stateO: Ki.State.design({ ... 
})\n \n });\n \n ---- statechart.js\n \n MyApp.Statechart = SC.Object.extend(Ki.StatechartManager, {\n \n rootState: Ki.State.design({\n \n initialSubstate: 'stateA',\n \n stateA: Ki.State.plugin('MyApp.StateA'),\n \n stateB: Ki.State.plugin('MyApp.StateB')\n \n })\n \n })\n \n }}}\n \n Using state plug-in functionality is optional. If you use the plug-in feature you can break up your statechart\n into as many files as you see fit.\n\n*/\n\nKi.StatechartManager = {\n \n // Walk like a duck\n isResponderContext: YES,\n \n // Walk like a duck\n isStatechart: YES,\n \n /**\n Indicates if this statechart has been initialized\n\n @property {Boolean}\n */\n statechartIsInitialized: NO,\n \n /**\n The root state of this statechart. All statecharts must have a root state.\n \n If this property is left unassigned then when the statechart is initialized\n it will used the rootStateExample, initialState, and statesAreConcurrent\n properties to construct a root state.\n \n @see #rootStateExample\n @see #initialState\n @see #statesAreConcurrent\n \n @property {Ki.State}\n */\n rootState: null,\n \n /** \n Represents the class used to construct a class that will be the root state for\n this statechart. The class assigned must derive from Ki.State. \n \n This property will only be used if the rootState property is not assigned.\n \n @see #rootState\n \n @property {Ki.State}\n */\n rootStateExample: Ki.State,\n \n /** \n Indicates what state should be the intiail state of this statechart. The value\n assigned must be the name of a property on this object that represents a state.\n As well, the statesAreConcurrent must be set to NO.\n \n This property will only be used if the rootState property is not assigned.\n \n @see #rootState\n \n @property {String} \n */\n initialState: null,\n \n /** \n Indicates if properties on this object representing states are concurrent to each other.\n If YES then they are concurrent, otherwise they are not. 
If the YES, then the\n initialState property must not be assigned.\n \n This property will only be used if the rootState property is not assigned.\n \n @see #rootState\n \n @property {Boolean}\n */\n statesAreConcurrent: NO,\n \n /** \n Indicates whether to use a monitor to monitor that statechart's activities. If true then\n the monitor will be active, otherwise the monitor will not be used. Useful for debugging\n purposes.\n \n @property {Boolean}\n */\n monitorIsActive: NO,\n \n /**\n A statechart monitor that can be used to monitor this statechart. Useful for debugging purposes.\n A monitor will only be used if monitorIsActive is true.\n \n @property {Ki.StatechartMonitor}\n */\n monitor: null,\n \n /**\n Used to specify what property (key) on the statechart should be used as the trace property. By\n default the property is 'trace'.\n\n @property {String}\n */\n statechartTraceKey: 'trace',\n\n /**\n Indicates whether to trace the statecharts activities. If true then the statechart will output\n its activites to the browser's JS console. Useful for debugging purposes.\n\n @see #statechartTraceKey\n\n @property {Boolean}\n */\n trace: NO,\n \n /**\n Used to specify what property (key) on the statechart should be used as the owner property. By\n default the property is 'owner'.\n\n @property {String}\n */\n statechartOwnerKey: 'owner',\n\n /**\n Sets who the owner is of this statechart. If null then the owner is this object otherwise\n the owner is the assigned object. \n\n @see #statechartOwnerKey\n\n @property {SC.Object}\n */\n owner: null,\n\n /** \n Indicates if the statechart should be automatically initialized by this\n object after it has been created. If YES then initStatechart will be\n called automatically, otherwise it will not.\n \n @property {Boolean}\n */\n autoInitStatechart: YES,\n \n /**\n If yes, any warning messages produced by the statechart or any of its states will\n not be logged, otherwise all warning messages will be logged. 
\n \n While designing and debugging your statechart, it's best to keep this value false.\n In production you can then suppress the warning messages.\n \n @property {Boolean}\n */\n suppressStatechartWarnings: NO,\n \n initMixin: function() {\n if (this.get('autoInitStatechart')) {\n this.initStatechart();\n }\n },\n \n destroyMixin: function() {\n var root = this.get('rootState'),\n traceKey = this.get('statechartTraceKey');\n\n this.removeObserver(traceKey, this, '_statechartTraceDidChange');\n\n root.destroy();\n this.set('rootState', null);\n },\n\n /**\n Initializes the statechart. By initializing the statechart, it will create all the states and register\n them with the statechart. Once complete, the statechart can be used to go to states and send events to.\n */\n initStatechart: function() {\n if (this.get('statechartIsInitialized')) return;\n \n this._gotoStateLocked = NO;\n this._sendEventLocked = NO;\n this._pendingStateTransitions = [];\n this._pendingSentEvents = [];\n \n this.sendAction = this.sendEvent;\n \n if (this.get('monitorIsActive')) {\n this.set('monitor', Ki.StatechartMonitor.create());\n }\n\n var traceKey = this.get('statechartTraceKey');\n\n this.addObserver(traceKey, this, '_statechartTraceDidChange');\n this._statechartTraceDidChange();\n\n var trace = this.get('allowTracing'),\n rootState = this.get('rootState'),\n msg;\n \n if (trace) this.statechartLogTrace(\"BEGIN initialize statechart\");\n \n // If no root state was explicitly defined then try to construct\n // a root state class\n if (!rootState) {\n rootState = this._constructRootStateClass();\n }\n else if (SC.typeOf(rootState) === SC.T_FUNCTION && rootState.statePlugin) {\n rootState = rootState.apply(this);\n }\n \n if (!(SC.kindOf(rootState, Ki.State) && rootState.isClass)) {\n msg = \"Unable to initialize statechart. 
Root state must be a state class\";\n this.statechartLogError(msg);\n throw msg;\n }\n \n rootState = this.createRootState(rootState, { \n statechart: this, \n name: Ki.ROOT_STATE_NAME \n });\n \n this.set('rootState', rootState);\n rootState.initState();\n \n if (SC.kindOf(rootState.get('initialSubstate'), Ki.EmptyState)) {\n msg = \"Unable to initialize statechart. Root state must have an initial substate explicilty defined\";\n this.statechartLogError(msg);\n throw msg;\n }\n \n if (!SC.empty(this.get('initialState'))) {\n var key = 'initialState';\n this.set(key, rootState.get(this.get(key)));\n } \n \n this.set('statechartIsInitialized', YES);\n this.gotoState(rootState);\n \n if (trace) this.statechartLogTrace(\"END initialize statechart\");\n },\n \n /**\n Will create a root state for the statechart\n */\n createRootState: function(state, attrs) {\n if (!attrs) attrs = {};\n state = state.create(attrs);\n return state;\n },\n \n /**\n Returns an array of all the current states for this statechart\n \n @returns {Array} the current states\n */\n currentStates: function() {\n return this.getPath('rootState.currentSubstates');\n }.property().cacheable(),\n \n /**\n Returns the first current state for this statechart. \n \n @return {Ki.State}\n */\n firstCurrentState: function() {\n var cs = this.get('currentStates');\n return cs ? cs.objectAt(0) : null;\n }.property('currentStates').cacheable(),\n \n /**\n Returns the count of the current states for this statechart\n \n @returns {Number} the count \n */\n currentStateCount: function() {\n return this.getPath('currentStates.length');\n }.property('currentStates').cacheable(),\n \n /**\n Checks if a given state is a current state of this statechart. 
\n \n @param state {State} the state to check\n @returns {Boolean} true if the state is a current state, otherwise fals is returned\n */\n stateIsCurrentState: function(state) {\n return this.get('rootState').stateIsCurrentSubstate(state);\n },\n \n /**\n Checks if the given value represents a state is this statechart\n \n @param value {State|String} either a state object or the name of a state\n @returns {Boolean} true if the state does belong ot the statechart, otherwise false is returned\n */\n doesContainState: function(value) {\n return !SC.none(this.getState(value));\n },\n \n /**\n Gets a state from the statechart that matches the given value\n \n @param value {State|String} either a state object of the name of a state\n @returns {State} if a match then the matching state is returned, otherwise null is returned \n */\n getState: function(value) {\n return this.get('rootState').getSubstate(value);\n },\n \n /**\n When called, the statechart will proceed with making state transitions in the statechart starting from \n a current state that meet the statechart conditions. When complete, some or all of the statechart's \n current states will be changed, and all states that were part of the transition process will either \n be exited or entered in a specific order.\n \n The state that is given to go to will not necessarily be a current state when the state transition process\n is complete. The final state or states are dependent on factors such an initial substates, concurrent \n states, and history states.\n \n Because the statechart can have one or more current states, it may be necessary to indicate what current state\n to start from. If no current state to start from is provided, then the statechart will default to using\n the first current state that it has; depending of the make up of the statechart (no concurrent state vs.\n with concurrent states), the outcome may be unexpected. 
For a statechart with concurrent states, it is best\n to provide a current state in which to start from.\n \n When using history states, the statechart will first make transitions to the given state and then use that\n state's history state and recursively follow each history state's history state until there are no \n more history states to follow. If the given state does not have a history state, then the statechart\n will continue following state transition procedures.\n \n Method can be called in the following ways:\n \n {{{\n \n // With one argument. \n gotoState()\n \n // With two argument.\n gotoState(, )\n \n // With three argument.\n gotoState(, , )\n gotoState(, , )\n \n // With four argument.\n gotoState(, , , )\n \n }}}\n \n where is either a Ki.State object or a string and is a regular JS hash object.\n \n @param state {Ki.State|String} the state to go to (may not be the final state in the transition process)\n @param fromCurrentState {Ki.State|String} Optional. The current state to start the transition process from.\n @param useHistory {Boolean} Optional. Indicates whether to include using history states in the transition process\n @param context {Hash} Optional. A context object that will be passed to all exited and entered states\n */\n gotoState: function(state, fromCurrentState, useHistory, context) {\n \n if (!this.get('statechartIsInitialized')) {\n this.statechartLogError(\"can not go to state %@. statechart has not yet been initialized\".fmt(state));\n return;\n }\n \n if (this.get('isDestroyed')) {\n this.statechartLogError(\"can not go to state %@. 
statechart is destroyed\".fmt(this));\n return;\n }\n \n var args = this._processGotoStateArgs(arguments);\n\n state = args.state;\n fromCurrentState = args.fromCurrentState;\n useHistory = args.useHistory;\n context = args.context;\n \n var pivotState = null,\n exitStates = [],\n enterStates = [],\n trace = this.get('allowTracing'),\n rootState = this.get('rootState'),\n paramState = state,\n paramFromCurrentState = fromCurrentState;\n \n state = rootState.getSubstate(state);\n \n if (SC.none(state)) {\n this.statechartLogError(\"Can not to goto state %@. Not a recognized state in statechart\".fmt(paramState));\n return;\n }\n \n if (this._gotoStateLocked) {\n // There is a state transition currently happening. Add this requested state\n // transition to the queue of pending state transitions. The request will\n // be invoked after the current state transition is finished.\n this._pendingStateTransitions.push({\n state: state,\n fromCurrentState: fromCurrentState,\n useHistory: useHistory,\n context: context\n });\n \n return;\n }\n \n // Lock the current state transition so that no other requested state transition \n // interferes. \n this._gotoStateLocked = YES;\n \n if (!SC.none(fromCurrentState)) {\n // Check to make sure the current state given is actually a current state of this statechart\n fromCurrentState = rootState.getSubstate(fromCurrentState);\n if (SC.none(fromCurrentState) || !fromCurrentState.get('isCurrentState')) {\n var msg = \"Can not to goto state %@. 
%@ is not a recognized current state in statechart\";\n this.statechartLogError(msg.fmt(paramState, paramFromCurrentState));\n this._gotoStateLocked = NO;\n return;\n }\n } \n else if (this.getPath('currentStates.length') > 0) {\n // No explicit current state to start from; therefore, just use the first current state as \n // a default, if there is a current state.\n fromCurrentState = this.get('currentStates')[0];\n }\n \n if (trace) {\n this.statechartLogTrace(\"BEGIN gotoState: %@\".fmt(state));\n this.statechartLogTrace(\"starting from current state: %@\".fmt(fromCurrentState));\n this.statechartLogTrace(\"current states before: %@\".fmt(this.get('currentStates')));\n }\n\n // If there is a current state to start the transition process from, then determine what\n // states are to be exited\n if (!SC.none(fromCurrentState)) {\n exitStates = this._createStateChain(fromCurrentState);\n }\n \n // Now determine the initial states to be entered\n enterStates = this._createStateChain(state);\n \n // Get the pivot state to indicate when to go from exiting states to entering states\n pivotState = this._findPivotState(exitStates, enterStates);\n\n if (pivotState) {\n if (trace) this.statechartLogTrace(\"pivot state = %@\".fmt(pivotState));\n if (pivotState.get('substatesAreConcurrent')) {\n this.statechartLogError(\"Can not go to state %@ from %@. 
Pivot state %@ has concurrent substates.\".fmt(state, fromCurrentState, pivotState));\n this._gotoStateLocked = NO;\n return;\n }\n }\n \n // Collect what actions to perform for the state transition process\n var gotoStateActions = [];\n \n // Go ahead and find states that are to be exited\n this._traverseStatesToExit(exitStates.shift(), exitStates, pivotState, gotoStateActions);\n \n // Now go find states that are to entered\n if (pivotState !== state) {\n this._traverseStatesToEnter(enterStates.pop(), enterStates, pivotState, useHistory, gotoStateActions);\n } else {\n this._traverseStatesToExit(pivotState, [], null, gotoStateActions);\n this._traverseStatesToEnter(pivotState, null, null, useHistory, gotoStateActions);\n }\n \n // Collected all the state transition actions to be performed. Now execute them.\n this._executeGotoStateActions(state, gotoStateActions, null, context);\n },\n \n /**\n Indicates if the statechart is in an active goto state process\n */\n gotoStateActive: function() {\n return this._gotoStateLocked;\n }.property(),\n \n /**\n Indicates if the statechart is in an active goto state process\n that has been suspended\n */\n gotoStateSuspended: function() {\n return this._gotoStateLocked && !!this._gotoStateSuspendedPoint;\n }.property(),\n \n /**\n Resumes an active goto state transition process that has been suspended.\n */\n resumeGotoState: function() {\n if (!this.get('gotoStateSuspended')) {\n this.statechartLogError(\"Can not resume goto state since it has not been suspended\");\n return;\n }\n \n var point = this._gotoStateSuspendedPoint;\n this._executeGotoStateActions(point.gotoState, point.actions, point.marker, point.context);\n },\n \n /** @private */\n _executeGotoStateActions: function(gotoState, actions, marker, context) {\n var action = null,\n len = actions.length,\n actionResult = null;\n \n marker = SC.none(marker) ? 
0 : marker;\n \n for (; marker < len; marker += 1) {\n action = actions[marker];\n switch (action.action) {\n case Ki.EXIT_STATE:\n actionResult = this._exitState(action.state, context);\n break;\n \n case Ki.ENTER_STATE:\n actionResult = this._enterState(action.state, action.currentState, context);\n break;\n }\n \n //\n // Check if the state wants to perform an asynchronous action during\n // the state transition process. If so, then we need to first\n // suspend the state transition process and then invoke the \n // asynchronous action. Once called, it is then up to the state or something \n // else to resume this statechart's state transition process by calling the\n // statechart's resumeGotoState method.\n //\n if (SC.kindOf(actionResult, Ki.Async)) {\n this._gotoStateSuspendedPoint = {\n gotoState: gotoState,\n actions: actions,\n marker: marker + 1,\n context: context\n }; \n \n actionResult.tryToPerform(action.state);\n return;\n }\n }\n \n this.notifyPropertyChange('currentStates');\n \n if (this.get('allowTracing')) {\n this.statechartLogTrace(\"current states after: %@\".fmt(this.get('currentStates')));\n this.statechartLogTrace(\"END gotoState: %@\".fmt(gotoState));\n }\n \n // Okay. We're done with the current state transition. 
Make sure to unlock the\n // gotoState and let other pending state transitions execute.\n this._gotoStateSuspendedPoint = null;\n this._gotoStateLocked = NO;\n this._flushPendingStateTransition();\n },\n \n /** @private */\n _exitState: function(state, context) {\n if (state.get('currentSubstates').indexOf(state) >= 0) { \n var parentState = state.get('parentState');\n while (parentState) {\n parentState.get('currentSubstates').removeObject(state);\n parentState = parentState.get('parentState');\n }\n }\n \n if (this.get('allowTracing')) this.statechartLogTrace(\"exiting state: %@\".fmt(state));\n \n state.set('currentSubstates', []);\n state.notifyPropertyChange('isCurrentState');\n var result = this.exitState(state, context);\n \n if (this.get('monitorIsActive')) this.get('monitor').pushExitedState(state);\n \n state._traverseStatesToExit_skipState = NO;\n \n return result;\n },\n \n /**\n What will actually invoke a state's exitState method.\n \n Called during the state transition process whenever the gotoState method is\n invoked.\n \n @param state {Ki.State} the state whose enterState method is to be invoked\n @param context {Hash} a context hash object to provide the enterState method\n */\n exitState: function(state, context) {\n return state.exitState(context);\n },\n \n /** @private */\n _enterState: function(state, current, context) {\n var parentState = state.get('parentState');\n if (parentState && !state.get('isConcurrentState')) parentState.set('historyState', state);\n \n if (current) {\n parentState = state;\n while (parentState) {\n parentState.get('currentSubstates').push(state);\n parentState = parentState.get('parentState');\n }\n }\n \n if (this.get('allowTracing')) this.statechartLogTrace(\"entering state: %@\".fmt(state));\n \n state.notifyPropertyChange('isCurrentState');\n var result = this.enterState(state, context);\n \n if (this.get('monitorIsActive')) this.get('monitor').pushEnteredState(state);\n \n return result;\n },\n \n /**\n What 
will actually invoke a state's enterState method.\n \n Called during the state transition process whenever the gotoState method is\n invoked.\n \n @param state {Ki.State} the state whose enterState method is to be invoked\n @param context {Hash} a context hash object to provide the enterState method\n */\n enterState: function(state, context) {\n return state.enterState(context);\n },\n \n /**\n When called, the statechart will proceed to make transitions to the given state then follow that\n state's history state. \n \n You can either go to a given state's history recursively or non-recursively. To go to a state's history\n recursively means to following each history state's history state until no more history states can be\n followed. Non-recursively means to just to the given state's history state but do not recusively follow\n history states. If the given state does not have a history state, then the statechart will just follow\n normal procedures when making state transitions.\n \n Because a statechart can have one or more current states, depending on if the statechart has any concurrent\n states, it is optional to provided current state in which to start the state transition process from. If no\n current state is provided, then the statechart will default to the first current state that it has; which, \n depending on the make up of that statechart, can lead to unexpected outcomes. For a statechart with concurrent\n states, it is best to explicitly supply a current state.\n \n Method can be called in the following ways:\n \n {{{\n \n // With one arguments. \n gotoHistorytate()\n \n // With two arguments. 
\n gotoHistorytate(, )\n \n // With three arguments.\n gotoHistorytate(, , )\n gotoHistorytate(, , )\n \n // With four argumetns\n gotoHistorytate(, , , )\n \n }}}\n \n where is either a Ki.State object or a string and is a regular JS hash object.\n \n @param state {Ki.State|String} the state to go to and follow it's history state\n @param fromCurrentState {Ki.State|String} Optional. the current state to start the state transition process from\n @param recursive {Boolean} Optional. whether to follow history states recursively.\n */\n gotoHistoryState: function(state, fromCurrentState, recursive, context) {\n if (!this.get('statechartIsInitialized')) {\n this.statechartLogError(\"can not go to state %@'s history state. Statechart has not yet been initialized\".fmt(state));\n return;\n }\n \n var args = this._processGotoStateArgs(arguments);\n \n state = args.state;\n fromCurrentState = args.fromCurrentState;\n recursive = args.useHistory;\n context = args.context;\n \n state = this.getState(state);\n \n if (!state) {\n this.statechartLogError(\"Can not to goto state %@'s history state. Not a recognized state in statechart\".fmt(state));\n return;\n }\n \n var historyState = state.get('historyState');\n \n if (!recursive) { \n if (historyState) {\n this.gotoState(historyState, fromCurrentState, context);\n } else {\n this.gotoState(state, fromCurrentState, context);\n }\n } else {\n this.gotoState(state, fromCurrentState, YES, context);\n }\n },\n \n /**\n Sends a given event to all the statechart's current states.\n \n If a current state does can not respond to the sent event, then the current state's parent state\n will be tried. 
This process is recursively done until no more parent state can be tried.\n \n @param event {String} name of the event\n @param arg1 {Object} optional argument\n @param arg2 {Object} optional argument\n @returns {SC.Responder} the responder that handled it or null\n */\n sendEvent: function(event, arg1, arg2) {\n \n if (this.get('isDestroyed')) {\n this.statechartLogError(\"can send event %@. statechart is destroyed\".fmt(event));\n return;\n }\n \n var statechartHandledEvent = NO,\n eventHandled = NO,\n currentStates = this.get('currentStates').slice(),\n len = 0,\n i = 0,\n state = null,\n trace = this.get('allowTracing');\n \n if (this._sendEventLocked || this._goStateLocked) {\n // Want to prevent any actions from being processed by the states until \n // they have had a chance to handle the most immediate action or completed \n // a state transition\n this._pendingSentEvents.push({\n event: event,\n arg1: arg1,\n arg2: arg2\n });\n\n return;\n }\n \n this._sendEventLocked = YES;\n \n if (trace) {\n this.statechartLogTrace(\"BEGIN sendEvent: event<%@>\".fmt(event));\n }\n \n len = currentStates.get('length');\n for (; i < len; i += 1) {\n eventHandled = NO;\n state = currentStates[i];\n if (!state.get('isCurrentState')) continue;\n while (!eventHandled && state) {\n eventHandled = state.tryToHandleEvent(event, arg1, arg2);\n if (!eventHandled) state = state.get('parentState');\n else statechartHandledEvent = YES;\n }\n }\n \n // Now that all the states have had a chance to process the \n // first event, we can go ahead and flush any pending sent events.\n this._sendEventLocked = NO;\n \n if (trace) {\n if (!statechartHandledEvent) this.statechartLogTrace(\"No state was able handle event %@\".fmt(event));\n this.statechartLogTrace(\"END sendEvent: event<%@>\".fmt(event));\n }\n \n var result = this._flushPendingSentEvents();\n \n return statechartHandledEvent ? this : (result ? 
this : null);\n },\n\n /** @private\n \n Creates a chain of states from the given state to the greatest ancestor state (the root state). Used\n when perform state transitions.\n */\n _createStateChain: function(state) {\n var chain = [];\n \n while (state) {\n chain.push(state);\n state = state.get('parentState');\n }\n \n return chain;\n },\n \n /** @private\n \n Finds a pivot state from two given state chains. The pivot state is the state indicating when states\n go from being exited to states being entered during the state transition process. The value \n returned is the fist matching state between the two given state chains. \n */\n _findPivotState: function(stateChain1, stateChain2) {\n if (stateChain1.length === 0 || stateChain2.length === 0) return null;\n \n var pivot = stateChain1.find(function(state, index) {\n if (stateChain2.indexOf(state) >= 0) return YES;\n });\n \n return pivot;\n },\n \n /** @private\n \n Recursively follow states that are to be exited during a state transition process. The exit\n process is to start from the given state and work its way up to when either all exit\n states have been reached based on a given exit path or when a stop state has been reached.\n \n @param state {State} the state to be exited\n @param exitStatePath {Array} an array representing a path of states that are to be exited\n @param stopState {State} an explicit state in which to stop the exiting process\n */\n _traverseStatesToExit: function(state, exitStatePath, stopState, gotoStateActions) { \n if (!state || state === stopState) return;\n \n var trace = this.get('allowTracing');\n \n // This state has concurrent substates. 
Therefore we have to make sure we\n // exit them up to this state before we can go any further up the exit chain.\n if (state.get('substatesAreConcurrent')) {\n var i = 0,\n currentSubstates = state.get('currentSubstates'),\n len = currentSubstates.length,\n currentState = null;\n \n for (; i < len; i += 1) {\n currentState = currentSubstates[i];\n if (currentState._traverseStatesToExit_skipState === YES) continue;\n var chain = this._createStateChain(currentState);\n this._traverseStatesToExit(chain.shift(), chain, state, gotoStateActions);\n }\n }\n \n gotoStateActions.push({ action: Ki.EXIT_STATE, state: state });\n if (state.get('isCurrentState')) state._traverseStatesToExit_skipState = YES;\n this._traverseStatesToExit(exitStatePath.shift(), exitStatePath, stopState, gotoStateActions);\n },\n \n /** @private\n \n Recursively follow states that are to be entred during the state transition process. The\n enter process is to start from the given state and work its way down a given enter path. When\n the end of enter path has been reached, then continue entering states based on whether \n an initial substate is defined, there are concurrent substates or history states are to be\n followed; when none of those condition are met then the enter process is done.\n \n @param state {State} the sate to be entered\n @param enterStatePath {Array} an array representing an initial path of states that are to be entered\n @param pivotState {State} The state pivoting when to go from exiting states to entering states\n @param useHistory {Boolean} indicates whether to recursively follow history states \n */\n _traverseStatesToEnter: function(state, enterStatePath, pivotState, useHistory, gotoStateActions) {\n if (!state) return;\n \n var trace = this.get('allowTracing');\n \n // We do not want to enter states in the enter path until the pivot state has been reached. 
After\n // the pivot state has been reached, then we can go ahead and actually enter states.\n if (pivotState) {\n if (state !== pivotState) {\n this._traverseStatesToEnter(enterStatePath.pop(), enterStatePath, pivotState, useHistory, gotoStateActions);\n } else {\n this._traverseStatesToEnter(enterStatePath.pop(), enterStatePath, null, useHistory, gotoStateActions);\n }\n }\n \n // If no more explicit enter path instructions, then default to enter states based on \n // other criteria\n else if (!enterStatePath || enterStatePath.length === 0) {\n var gotoStateAction = { action: Ki.ENTER_STATE, state: state, currentState: NO };\n gotoStateActions.push(gotoStateAction);\n \n var initialSubstate = state.get('initialSubstate'),\n historyState = state.get('historyState');\n \n // State has concurrent substates. Need to enter all of the substates\n if (state.get('substatesAreConcurrent')) {\n this._traverseConcurrentStatesToEnter(state.get('substates'), null, useHistory, gotoStateActions);\n }\n \n // State has substates and we are instructed to recursively follow the state's\n // history state if it has one.\n else if (state.get('hasSubstates') && historyState && useHistory) {\n this._traverseStatesToEnter(historyState, null, null, useHistory, gotoStateActions);\n }\n \n // State has an initial substate to enter\n else if (initialSubstate) {\n if (SC.kindOf(initialSubstate, Ki.HistoryState)) {\n if (!useHistory) useHistory = initialSubstate.get('isRecursive');\n initialSubstate = initialSubstate.get('state');\n }\n this._traverseStatesToEnter(initialSubstate, null, null, useHistory, gotoStateActions); \n } \n \n // Looks like we hit the end of the road. 
Therefore the state has now become\n // a current state of the statechart.\n else {\n gotoStateAction.currentState = YES;\n }\n }\n \n // Still have an explicit enter path to follow, so keep moving through the path.\n else if (enterStatePath.length > 0) {\n gotoStateActions.push({ action: Ki.ENTER_STATE, state: state });\n var nextState = enterStatePath.pop();\n this._traverseStatesToEnter(nextState, enterStatePath, null, useHistory, gotoStateActions); \n \n // We hit a state that has concurrent substates. Must go through each of the substates\n // and enter them\n if (state.get('substatesAreConcurrent')) {\n this._traverseConcurrentStatesToEnter(state.get('substates'), nextState, useHistory, gotoStateActions);\n }\n }\n },\n \n /** @override\n \n Returns YES if the named value translates into an executable function on\n any of the statechart's current states or the statechart itself.\n \n @param event {String} the property name to check\n @returns {Boolean}\n */\n respondsTo: function(event) {\n var currentStates = this.get('currentStates'),\n len = currentStates.get('length'), \n i = 0, state = null;\n \n for (; i < len; i += 1) {\n state = currentStates.objectAt(i);\n while (state) {\n if (state.respondsToEvent(event)) return true;\n state = state.get('parentState');\n }\n }\n \n // None of the current states can respond. Now check the statechart itself\n return SC.typeOf(this[event]) === SC.T_FUNCTION; \n },\n \n /** @override\n \n Attemps to handle a given event against any of the statechart's current states and the\n statechart itself. 
If any current state can handle the event or the statechart itself can\n handle the event then YES is returned, otherwise NO is returned.\n \n @param event {String} what to perform\n @param arg1 {Object} Optional\n @param arg2 {Object} Optional\n @returns {Boolean} YES if handled, NO if not handled\n */\n tryToPerform: function(event, arg1, arg2) {\n if (this.respondsTo(event)) {\n if (SC.typeOf(this[event]) === SC.T_FUNCTION) return (this[event](arg1, arg2) !== NO);\n else return !!this.sendEvent(event, arg1, arg2);\n } return NO;\n },\n \n /**\n Used to invoke a method on current states. If the method can not be executed\n on a current state, then the state's parent states will be tried in order\n of closest ancestry.\n \n A few notes: \n \n 1) Calling this is not the same as calling sendEvent or sendAction.\n Rather, this should be seen as calling normal methods on a state that \n will *not* call gotoState or gotoHistoryState. \n \n 2) A state will only ever be invoked once per call. So if there are two \n or more current states that have the same parent state, then that parent \n state will only be invoked once if none of the current states are able\n to invoke the given method.\n \n When calling this method, you are able to supply zero ore more arguments\n that can be pass onto the method called on the states. As an example\n \n {{{\n \n invokeStateMethod('render', context, firstTime);\n \n }}}\n \n The above call will invoke the render method on the current states\n and supply the context and firstTime arguments to the method. \n \n Because a statechart can have more than one current state and the method \n invoked may return a value, the addition of a callback function may be provided \n in order to handle the returned value for each state. As an example, let's say\n we want to call a calculate method on the current states where the method\n will return a value when invoked. 
We can handle the returned values like so:\n \n {{{\n \n invokeStateMethod('calculate', value, function(state, result) {\n // .. handle the result returned from calculate that was invoked\n // on the given state\n })\n \n }}}\n \n If the method invoked does not return a value and a callback function is\n supplied, then result value will simply be undefined. In all cases, if\n a callback function is given, it must be the last value supplied to this\n method.\n \n invokeStateMethod will return a value if only one state was able to have \n the given method invoked on it, otherwise no value is returned. \n \n @param methodName {String} methodName a method name\n @param args {Object...} Optional. any additional arguments\n @param func {Function} Optional. a callback function. Must be the last\n value supplied if provided.\n \n @returns a value if the number of current states is one, otherwise undefined\n is returned. The value is the result of the method that got invoked\n on a state.\n */\n invokeStateMethod: function(methodName, args, func) {\n if (methodName === 'unknownEvent') {\n this.statechartLogError(\"can not invoke method unkownEvent\");\n return;\n }\n \n args = SC.A(arguments); args.shift();\n \n var len = args.length, \n arg = len > 0 ? args[len - 1] : null,\n callback = SC.typeOf(arg) === SC.T_FUNCTION ? 
args.pop() : null,\n currentStates = this.get('currentStates'), \n i = 0, state = null, checkedStates = {},\n method, result = undefined, calledStates = 0;\n \n len = currentStates.get('length');\n \n for (; i < len; i += 1) {\n state = currentStates.objectAt(i);\n while (state) {\n if (checkedStates[state.get('fullPath')]) break;\n checkedStates[state.get('fullPath')] = YES;\n method = state[methodName];\n if (SC.typeOf(method) === SC.T_FUNCTION && !method.isEventHandler) {\n result = method.apply(state, args);\n if (callback) callback.call(this, state, result);\n calledStates += 1; \n break;\n }\n state = state.get('parentState');\n }\n }\n \n return calledStates === 1 ? result : undefined;\n },\n \n /** @private\n \n Iterate over all the given concurrent states and enter them\n */\n _traverseConcurrentStatesToEnter: function(states, exclude, useHistory, gotoStateActions) {\n var i = 0,\n len = states.length,\n state = null;\n \n for (; i < len; i += 1) {\n state = states[i];\n if (state !== exclude) this._traverseStatesToEnter(state, null, null, useHistory, gotoStateActions);\n }\n },\n \n /** @private\n \n Called by gotoState to flush a pending state transition at the front of the \n pending queue.\n */\n _flushPendingStateTransition: function() {\n if (!this._pendingStateTransitions) {\n this.statechartLogError(\"Unable to flush pending state transition. 
_pendingStateTransitions is invalid\");\n return;\n }\n var pending = this._pendingStateTransitions.shift();\n if (!pending) return;\n this.gotoState(pending.state, pending.fromCurrentState, pending.useHistory, pending.context);\n },\n \n /** @private\n\n Called by sendEvent to flush a pending actions at the front of the pending\n queue\n */\n _flushPendingSentEvents: function() {\n var pending = this._pendingSentEvents.shift();\n if (!pending) return null;\n return this.sendEvent(pending.event, pending.arg1, pending.arg2);\n },\n \n /** @private */\n _monitorIsActiveDidChange: function() {\n if (this.get('monitorIsActive') && SC.none(this.get('monitor'))) {\n this.set('monitor', Ki.StatechartMonitor.create());\n }\n }.observes('monitorIsActive'),\n \n /** @private \n Will process the arguments supplied to the gotoState method.\n \n TODO: Come back to this and refactor the code. It works, but it\n could certainly be improved\n */\n _processGotoStateArgs: function(args) {\n var processedArgs = { \n state: null, \n fromCurrentState: null, \n useHistory: false, \n context: null \n },\n len = null,\n value = null;\n \n args = SC.$A(args);\n args = args.filter(function(item) {\n return !(item === undefined); \n });\n len = args.length;\n \n if (len < 1) return processedArgs;\n \n processedArgs.state = args[0];\n \n if (len === 2) {\n value = args[1];\n switch (SC.typeOf(value)) {\n case SC.T_BOOL: \n processedArgs.useHistory = value;\n break;\n case SC.T_HASH:\n processedArgs.context = value;\n break;\n default:\n processedArgs.fromCurrentState = value;\n }\n }\n else if (len === 3) {\n value = args[1];\n if (SC.typeOf(value) === SC.T_BOOL) {\n processedArgs.useHistory = value;\n processedArgs.context = args[2];\n } else {\n processedArgs.fromCurrentState = value;\n value = args[2];\n if (SC.typeOf(value) === SC.T_BOOL) {\n processedArgs.useHistory = value;\n } else {\n processedArgs.context = value;\n }\n }\n }\n else {\n processedArgs.fromCurrentState = args[1];\n 
processedArgs.useHistory = args[2];\n processedArgs.context = args[3];\n }\n \n return processedArgs;\n },\n \n /** @private \n \n Will return a newly constructed root state class. The root state will have substates added to\n it based on properties found on this state that derive from a Ki.State class. For the\n root state to be successfully built, the following much be met:\n \n - The rootStateExample property must be defined with a class that derives from Ki.State\n - Either the initialState or statesAreConcurrent property must be set, but not both\n - There must be one or more states that can be added to the root state\n \n */\n _constructRootStateClass: function() {\n var rsExampleKey = 'rootStateExample',\n rsExample = this.get(rsExampleKey),\n initialState = this.get('initialState'),\n statesAreConcurrent = this.get('statesAreConcurrent'),\n stateCount = 0,\n key, value, valueIsFunc, attrs = {};\n \n if (SC.typeOf(rsExample) === SC.T_FUNCTION && rsExample.statePlugin) {\n rsExample = rsExample.apply(this);\n }\n\n if (!(SC.kindOf(rsExample, Ki.State) && rsExample.isClass)) {\n this._logStatechartCreationError(\"Invalid root state example\");\n return null;\n }\n \n if (statesAreConcurrent && !SC.empty(initialState)) {\n this._logStatechartCreationError(\"Can not assign an initial state when states are concurrent\");\n } else if (statesAreConcurrent) {\n attrs.substatesAreConcurrent = YES;\n } else if (SC.typeOf(initialState) === SC.T_STRING) {\n attrs.initialSubstate = initialState;\n } else {\n this._logStatechartCreationError(\"Must either define initial state or assign states as concurrent\");\n return null;\n }\n \n for (key in this) {\n if (key === rsExampleKey) continue;\n \n value = this[key];\n valueIsFunc = SC.typeOf(value) === SC.T_FUNCTION;\n \n if (valueIsFunc && value.statePlugin) {\n value = value.apply(this);\n }\n \n if (SC.kindOf(value, Ki.State) && value.isClass && this[key] !== this.constructor) {\n attrs[key] = value;\n stateCount += 1;\n 
}\n }\n \n if (stateCount === 0) {\n this._logStatechartCreationError(\"Must define one or more states\");\n return null;\n }\n \n return rsExample.extend(attrs);\n },\n \n /** @private */\n _logStatechartCreationError: function(msg) {\n SC.Logger.error(\"Unable to create statechart for %@: %@.\".fmt(this, msg));\n },\n \n /** \n Used to log a statechart trace message\n */\n statechartLogTrace: function(msg) {\n SC.Logger.info(\"%@: %@\".fmt(this.get('statechartLogPrefix'), msg));\n },\n \n /**\n Used to log a statechart error message\n */\n statechartLogError: function(msg) {\n SC.Logger.error(\"ERROR %@: %@\".fmt(this.get('statechartLogPrefix'), msg));\n },\n \n /** \n Used to log a statechart warning message\n */\n statechartLogWarning: function(msg) {\n if (this.get('suppressStatechartWarnings')) return;\n SC.Logger.warn(\"WARN %@: %@\".fmt(this.get('statechartLogPrefix'), msg));\n },\n \n /** @property */\n statechartLogPrefix: function() {\n var className = SC._object_className(this.constructor),\n name = this.get('name'), prefix;\n \n if (SC.empty(name)) prefix = \"%@<%@>\".fmt(className, SC.guidFor(this));\n else prefix = \"%@<%@, %@>\".fmt(className, name, SC.guidFor(this));\n \n return prefix;\n }.property().cacheable(),\n\n /** @private @property */\n allowTracing: function() {\n var key = this.get('statechartTraceKey');\n return this.get(key);\n }.property().cacheable(),\n\n /** @private */\n _statechartTraceDidChange: function() {\n this.notifyPropertyChange('allowTracing');\n }\n \n};\n\n/** \n The default name given to a statechart's root state\n*/\nKi.ROOT_STATE_NAME = \"__ROOT_STATE__\";\n\n/**\n Constants used during the state transition process\n*/\nKi.EXIT_STATE = 0;\nKi.ENTER_STATE = 1;\n\n/**\n A Startchart class. \n*/\nKi.Statechart = SC.Object.extend(Ki.StatechartManager, {\n autoInitStatechart: NO\n});\n\nKi.Statechart.design = Ki.Statechart.extend;\n\n/**\n Represents a call that is intended to be asynchronous. 
This is\n used during a state transition process when either entering or\n exiting a state.\n*/\nKi.Async = SC.Object.extend({\n \n func: null,\n \n arg1: null,\n \n arg2: null,\n \n /** @private\n Called by the statechart\n */\n tryToPerform: function(state) {\n var func = this.get('func'),\n arg1 = this.get('arg1'),\n arg2 = this.get('arg2'),\n funcType = SC.typeOf(func);\n \n if (funcType === SC.T_STRING) {\n state.tryToPerform(func, arg1, arg2);\n } \n else if (funcType === SC.T_FUNCTION) {\n func.apply(state, [arg1, arg2]);\n }\n }\n \n});\n\n/**\n Singleton\n*/\nKi.Async.mixin({\n \n /**\n Call in either a state's enterState or exitState method when you\n want a state to perform an asynchronous action, such as an animation.\n \n Examples:\n \n {{\n \n Ki.State.extend({\n \n enterState: function() {\n return Ki.Async.perform('foo');\n },\n \n exitState: function() {\n return Ki.Async.perform('bar', 100);\n }\n \n foo: function() { ... },\n \n bar: function(arg) { ... }\n \n });\n \n }}\n \n @param func {String|Function} the functio to be invoked on a state\n @param arg1 Optional. An argument to pass to the given function\n @param arg2 Optional. 
An argument to pass to the given function\n @return {Ki.Async} a new instance of a Ki.Async\n */\n perform: function(func, arg1, arg2) {\n return Ki.Async.create({ func: func, arg1: arg1, arg2: arg2 });\n }\n \n});", "frameworks/foundation/tests/event_handling/basic/with_concurrent_states.js": "// ==========================================================================\n// Ki.Statechart Unit Test\n// ==========================================================================\n/*globals Ki */\n\nvar statechart = null;\n\n// ..........................................................\n// CONTENT CHANGING\n// \n\nmodule(\"Ki.Statechart: With Concurrent States - Send Event Tests\", {\n setup: function() {\n\n statechart = Ki.Statechart.create({\n \n monitorIsActive: YES,\n \n rootState: Ki.State.design({\n \n initialSubstate: 'x',\n \n x: Ki.State.design({\n \n substatesAreConcurrent: YES,\n \n a: Ki.State.design({\n\n initialSubstate: 'c',\n\n eventAInvoked: NO,\n\n eventA: function() { this.set('eventAInvoked', YES); },\n\n c: Ki.State.design({\n eventB: function() { this.gotoState('d'); },\n eventD: function() { this.gotoState('y'); }\n }),\n\n d: Ki.State.design({\n eventC: function() { this.gotoState('c'); }\n })\n\n }),\n\n b: Ki.State.design({\n\n initialSubstate: 'e',\n\n eventAInvoked: NO,\n\n eventA: function() { this.set('eventAInvoked', YES); },\n\n e: Ki.State.design({\n eventB: function() { this.gotoState('f'); },\n eventD: function() { this.gotoState('y'); }\n }),\n\n f: Ki.State.design({\n eventC: function() { this.gotoState('e'); }\n })\n\n })\n \n }),\n \n y: Ki.State.design()\n \n })\n \n });\n \n statechart.initStatechart();\n },\n \n teardown: function() {\n statechart.destroy();\n statechart = null;\n }\n});\n\ntest(\"send event eventA\", function() {\n var monitor = statechart.get('monitor'),\n stateA = statechart.getState('a'),\n stateB = statechart.getState('b');\n \n monitor.reset();\n\n equals(stateA.get('eventAInvoked'), false);\n 
equals(stateB.get('eventAInvoked'), false);\n\n statechart.sendEvent('eventA');\n \n equals(monitor.get('length'), 0, 'state sequence should be of length 0');\n equals(statechart.stateIsCurrentState('c'), true, 'current state should be c');\n equals(statechart.stateIsCurrentState('e'), true, 'current state should be e');\n equals(stateA.get('eventAInvoked'), true);\n equals(stateB.get('eventAInvoked'), true);\n});\n\ntest(\"send event eventB\", function() {\n var monitor = statechart.get('monitor');\n \n monitor.reset();\n \n equals(statechart.stateIsCurrentState('c'), true, 'current state should be c');\n equals(statechart.stateIsCurrentState('e'), true, 'current state should be e');\n \n statechart.sendEvent('eventB');\n \n equals(statechart.get('currentStateCount'), 2, 'current state count should be 2');\n equals(statechart.stateIsCurrentState('d'), true, 'current state should be d');\n equals(statechart.stateIsCurrentState('f'), true, 'current state should be f');\n \n equals(monitor.get('length'), 4, 'state sequence should be of length 4');\n equals(monitor.matchSequence()\n .begin()\n .exited('c').entered('d')\n .exited('e').entered('f')\n .end(), \n true, 'sequence should be exited[c], entered[d], exited[e], entered[f]');\n});\n\ntest(\"send event eventB then eventC\", function() {\n var monitor = statechart.get('monitor');\n\n statechart.sendEvent('eventB');\n \n equals(statechart.stateIsCurrentState('d'), true, 'current state should be d');\n equals(statechart.stateIsCurrentState('f'), true, 'current state should be f');\n\n monitor.reset();\n \n statechart.sendEvent('eventC');\n\n equals(statechart.stateIsCurrentState('c'), true, 'current state should be c');\n equals(statechart.stateIsCurrentState('e'), true, 'current state should be e');\n\n equals(monitor.get('length'), 4, 'state sequence should be of length 4');\n equals(monitor.matchSequence()\n .begin()\n .exited('d').entered('c')\n .exited('f').entered('e')\n .end(), \n true, 'sequence should be 
exited[d], entered[c], exited[f], entered[e]');\n});\n\ntest(\"send event eventD\", function() {\n var monitor = statechart.get('monitor');\n \n monitor.reset();\n \n equals(statechart.stateIsCurrentState('c'), true, 'current state should be c');\n equals(statechart.stateIsCurrentState('e'), true, 'current state should be e');\n \n statechart.sendEvent('eventD');\n \n equals(monitor.get('length'), 6, 'state sequence should be of length 6');\n equals(monitor.matchSequence()\n .begin()\n .exited('c', 'a', 'e', 'b', 'x').entered('y')\n .end(), \n true, 'sequence should be exited[c, a, e, b, x], entered[y]');\n \n equals(statechart.currentStateCount(), 1, 'statechart should only have 1 current state');\n equals(statechart.stateIsCurrentState('c'), false, 'current state not should be c');\n equals(statechart.stateIsCurrentState('e'), false, 'current state not should be e');\n equals(statechart.stateIsCurrentState('y'), true, 'current state should be y');\n});\n\ntest(\"send event eventZ\", function() {\n var monitor = statechart.get('monitor');\n \n monitor.reset();\n \n equals(statechart.stateIsCurrentState('c'), true, 'current state should be c');\n equals(statechart.stateIsCurrentState('e'), true, 'current state should be e');\n \n equals(monitor.get('length'), 0, 'state sequence should be of length 0');\n \n equals(statechart.stateIsCurrentState('c'), true, 'current state should be c');\n equals(statechart.stateIsCurrentState('e'), true, 'current state should be e');\n});\n"}, "files_after": {"frameworks/foundation/system/statechart.js": "// ==========================================================================\n// Project: Ki - A Statechart Framework for SproutCore\n// License: Licensed under MIT license (see license.js)\n// ==========================================================================\n\n/*globals Ki */\n\nsc_require('system/state');\n\n/**\n The startchart manager mixin allows an object to be a statechart. 
By becoming a statechart, the\n object can then be manage a set of its own states.\n \n This implemention of the statechart manager closely follows the concepts stated in D. Harel's \n original paper \"Statecharts: A Visual Formalism For Complex Systems\" \n (www.wisdom.weizmann.ac.il/~harel/papers/Statecharts.pdf). \n \n The statechart allows for complex state heircharies by nesting states within states, and \n allows for state orthogonality based on the use of concurrent states.\n \n At minimum, a statechart must have one state: The root state. All other states in the statechart\n are a decendents (substates) of the root state.\n \n The following example shows how states are nested within a statechart:\n \n {{{\n \n MyApp.Statechart = SC.Object.extend(Ki.StatechartManager, {\n \n rootState: Ki.State.design({\n \n initialSubstate: 'stateA',\n \n stateA: Ki.State.design({\n // ... can continue to nest further states\n }),\n \n stateB: Ki.State.design({\n // ... can continue to nest further states\n })\n })\n \n })\n \n }}}\n \n Note how in the example above, the root state as an explicit initial substate to enter into. If no\n initial substate is provided, then the statechart will default to the the state's first substate.\n \n You can also defined states without explicitly defining the root state. To do so, simply create properties\n on your object that represents states. Upon initialization, a root state will be constructed automatically\n by the mixin and make the states on the object substates of the root state. As an example:\n \n {{{\n \n MyApp.Statechart = SC.Object.extend(Ki.StatechartManager, {\n \n initialState: 'stateA',\n \n stateA: Ki.State.design({\n // ... can continue to nest further states\n }),\n \n stateB: Ki.State.design({\n // ... 
can continue to nest further states\n })\n \n })\n \n }}} \n \n If you liked to specify a class that should be used as the root state but using the above method to defined\n states, you can set the rootStateExample property with a class that extends from Ki.State. If the \n rootStateExaple property is not explicitly assigned the then default class used will be Ki.State.\n \n To provide your statechart with orthogonality, you use concurrent states. If you use concurrent states,\n then your statechart will have multiple current states. That is because each concurrent state represents an\n independent state structure from other concurrent states. The following example shows how to provide your\n statechart with concurrent states:\n \n {{{\n \n MyApp.Statechart = SC.Object.extend(Ki.StatechartManager, {\n \n rootState: Ki.State.design({\n \n substatesAreConcurrent: YES,\n \n stateA: Ki.State.design({\n // ... can continue to nest further states\n }),\n \n stateB: Ki.State.design({\n // ... can continue to nest further states\n })\n })\n \n })\n \n }}}\n \n Above, to indicate that a state's substates are concurrent, you just have to set the substatesAreConcurrent to \n YES. Once done, then stateA and stateB will be independent of each other and each will manage their\n own current substates. The root state will then have more then one current substate.\n \n To define concurrent states directly on the object without explicitly defining a root, you can do the \n following:\n \n {{{\n\n MyApp.Statechart = SC.Object.extend(Ki.StatechartManager, {\n \n statesAreConcurrent: YES,\n \n stateA: Ki.State.design({\n // ... can continue to nest further states\n }),\n \n stateB: Ki.State.design({\n // ... can continue to nest further states\n })\n \n })\n\n }}}\n \n Remember that a startchart can have a mixture of nested and concurrent states in order for you to \n create as complex of statecharts that suite your needs. 
Here is an example of a mixed state structure:\n \n {{{\n \n MyApp.Statechart = SC.Object.extend(Ki.StatechartManager, {\n \n rootState: Ki.State.design({\n \n initialSubstate: 'stateA',\n \n stateA: Ki.State.design({\n \n substatesAreConcurrent: YES,\n \n stateM: Ki.State.design({ ... })\n stateN: Ki.State.design({ ... })\n stateO: Ki.State.design({ ... })\n \n }),\n \n stateB: Ki.State.design({\n \n initialSubstate: 'stateX',\n \n stateX: Ki.State.design({ ... })\n stateY: Ki.State.desgin({ ... })\n \n })\n })\n \n })\n \n }}}\n \n Depending on your needs, a statechart can have lots of states, which can become hard to manage all within\n one file. To modularize your states and make them easier to manage and maintain, you can plug-in states\n into other states. Let's say we are using the statechart in the last example above, and all the code is \n within one file. We could update the code and split the logic across two or more files like so:\n \n {{{\n ---- state_a.js\n \n MyApp.StateA = Ki.State.extend({\n \n substatesAreConcurrent: YES,\n \n stateM: Ki.State.design({ ... })\n stateN: Ki.State.design({ ... })\n stateO: Ki.State.design({ ... })\n \n });\n \n ---- state_b.js\n \n MyApp.StateB = Ki.State.extend({\n \n substatesAreConcurrent: YES,\n \n stateM: Ki.State.design({ ... })\n stateN: Ki.State.design({ ... })\n stateO: Ki.State.design({ ... })\n \n });\n \n ---- statechart.js\n \n MyApp.Statechart = SC.Object.extend(Ki.StatechartManager, {\n \n rootState: Ki.State.design({\n \n initialSubstate: 'stateA',\n \n stateA: Ki.State.plugin('MyApp.StateA'),\n \n stateB: Ki.State.plugin('MyApp.StateB')\n \n })\n \n })\n \n }}}\n \n Using state plug-in functionality is optional. 
If you use the plug-in feature you can break up your statechart\n into as many files as you see fit.\n\n*/\n\nKi.StatechartManager = {\n \n // Walk like a duck\n isResponderContext: YES,\n \n // Walk like a duck\n isStatechart: YES,\n \n /**\n Indicates if this statechart has been initialized\n\n @property {Boolean}\n */\n statechartIsInitialized: NO,\n \n /**\n The root state of this statechart. All statecharts must have a root state.\n \n If this property is left unassigned then when the statechart is initialized\n it will used the rootStateExample, initialState, and statesAreConcurrent\n properties to construct a root state.\n \n @see #rootStateExample\n @see #initialState\n @see #statesAreConcurrent\n \n @property {Ki.State}\n */\n rootState: null,\n \n /** \n Represents the class used to construct a class that will be the root state for\n this statechart. The class assigned must derive from Ki.State. \n \n This property will only be used if the rootState property is not assigned.\n \n @see #rootState\n \n @property {Ki.State}\n */\n rootStateExample: Ki.State,\n \n /** \n Indicates what state should be the intiail state of this statechart. The value\n assigned must be the name of a property on this object that represents a state.\n As well, the statesAreConcurrent must be set to NO.\n \n This property will only be used if the rootState property is not assigned.\n \n @see #rootState\n \n @property {String} \n */\n initialState: null,\n \n /** \n Indicates if properties on this object representing states are concurrent to each other.\n If YES then they are concurrent, otherwise they are not. If the YES, then the\n initialState property must not be assigned.\n \n This property will only be used if the rootState property is not assigned.\n \n @see #rootState\n \n @property {Boolean}\n */\n statesAreConcurrent: NO,\n \n /** \n Indicates whether to use a monitor to monitor that statechart's activities. 
If true then\n the monitor will be active, otherwise the monitor will not be used. Useful for debugging\n purposes.\n \n @property {Boolean}\n */\n monitorIsActive: NO,\n \n /**\n A statechart monitor that can be used to monitor this statechart. Useful for debugging purposes.\n A monitor will only be used if monitorIsActive is true.\n \n @property {Ki.StatechartMonitor}\n */\n monitor: null,\n \n /**\n Used to specify what property (key) on the statechart should be used as the trace property. By\n default the property is 'trace'.\n\n @property {String}\n */\n statechartTraceKey: 'trace',\n\n /**\n Indicates whether to trace the statecharts activities. If true then the statechart will output\n its activites to the browser's JS console. Useful for debugging purposes.\n\n @see #statechartTraceKey\n\n @property {Boolean}\n */\n trace: NO,\n \n /**\n Used to specify what property (key) on the statechart should be used as the owner property. By\n default the property is 'owner'.\n\n @property {String}\n */\n statechartOwnerKey: 'owner',\n\n /**\n Sets who the owner is of this statechart. If null then the owner is this object otherwise\n the owner is the assigned object. \n\n @see #statechartOwnerKey\n\n @property {SC.Object}\n */\n owner: null,\n\n /** \n Indicates if the statechart should be automatically initialized by this\n object after it has been created. If YES then initStatechart will be\n called automatically, otherwise it will not.\n \n @property {Boolean}\n */\n autoInitStatechart: YES,\n \n /**\n If yes, any warning messages produced by the statechart or any of its states will\n not be logged, otherwise all warning messages will be logged. 
\n \n While designing and debugging your statechart, it's best to keep this value false.\n In production you can then suppress the warning messages.\n \n @property {Boolean}\n */\n suppressStatechartWarnings: NO,\n \n initMixin: function() {\n if (this.get('autoInitStatechart')) {\n this.initStatechart();\n }\n },\n \n destroyMixin: function() {\n var root = this.get('rootState'),\n traceKey = this.get('statechartTraceKey');\n\n this.removeObserver(traceKey, this, '_statechartTraceDidChange');\n\n root.destroy();\n this.set('rootState', null);\n },\n\n /**\n Initializes the statechart. By initializing the statechart, it will create all the states and register\n them with the statechart. Once complete, the statechart can be used to go to states and send events to.\n */\n initStatechart: function() {\n if (this.get('statechartIsInitialized')) return;\n \n this._gotoStateLocked = NO;\n this._sendEventLocked = NO;\n this._pendingStateTransitions = [];\n this._pendingSentEvents = [];\n \n this.sendAction = this.sendEvent;\n \n if (this.get('monitorIsActive')) {\n this.set('monitor', Ki.StatechartMonitor.create());\n }\n\n var traceKey = this.get('statechartTraceKey');\n\n this.addObserver(traceKey, this, '_statechartTraceDidChange');\n this._statechartTraceDidChange();\n\n var trace = this.get('allowTracing'),\n rootState = this.get('rootState'),\n msg;\n \n if (trace) this.statechartLogTrace(\"BEGIN initialize statechart\");\n \n // If no root state was explicitly defined then try to construct\n // a root state class\n if (!rootState) {\n rootState = this._constructRootStateClass();\n }\n else if (SC.typeOf(rootState) === SC.T_FUNCTION && rootState.statePlugin) {\n rootState = rootState.apply(this);\n }\n \n if (!(SC.kindOf(rootState, Ki.State) && rootState.isClass)) {\n msg = \"Unable to initialize statechart. 
Root state must be a state class\";\n this.statechartLogError(msg);\n throw msg;\n }\n \n rootState = this.createRootState(rootState, { \n statechart: this, \n name: Ki.ROOT_STATE_NAME \n });\n \n this.set('rootState', rootState);\n rootState.initState();\n \n if (SC.kindOf(rootState.get('initialSubstate'), Ki.EmptyState)) {\n msg = \"Unable to initialize statechart. Root state must have an initial substate explicilty defined\";\n this.statechartLogError(msg);\n throw msg;\n }\n \n if (!SC.empty(this.get('initialState'))) {\n var key = 'initialState';\n this.set(key, rootState.get(this.get(key)));\n } \n \n this.set('statechartIsInitialized', YES);\n this.gotoState(rootState);\n \n if (trace) this.statechartLogTrace(\"END initialize statechart\");\n },\n \n /**\n Will create a root state for the statechart\n */\n createRootState: function(state, attrs) {\n if (!attrs) attrs = {};\n state = state.create(attrs);\n return state;\n },\n \n /**\n Returns an array of all the current states for this statechart\n \n @returns {Array} the current states\n */\n currentStates: function() {\n return this.getPath('rootState.currentSubstates');\n }.property().cacheable(),\n \n /**\n Returns the first current state for this statechart. \n \n @return {Ki.State}\n */\n firstCurrentState: function() {\n var cs = this.get('currentStates');\n return cs ? cs.objectAt(0) : null;\n }.property('currentStates').cacheable(),\n \n /**\n Returns the count of the current states for this statechart\n \n @returns {Number} the count \n */\n currentStateCount: function() {\n return this.getPath('currentStates.length');\n }.property('currentStates').cacheable(),\n \n /**\n Checks if a given state is a current state of this statechart. 
\n \n @param state {State} the state to check\n @returns {Boolean} true if the state is a current state, otherwise fals is returned\n */\n stateIsCurrentState: function(state) {\n return this.get('rootState').stateIsCurrentSubstate(state);\n },\n \n /**\n Checks if the given value represents a state is this statechart\n \n @param value {State|String} either a state object or the name of a state\n @returns {Boolean} true if the state does belong ot the statechart, otherwise false is returned\n */\n doesContainState: function(value) {\n return !SC.none(this.getState(value));\n },\n \n /**\n Gets a state from the statechart that matches the given value\n \n @param value {State|String} either a state object of the name of a state\n @returns {State} if a match then the matching state is returned, otherwise null is returned \n */\n getState: function(value) {\n return this.get('rootState').getSubstate(value);\n },\n \n /**\n When called, the statechart will proceed with making state transitions in the statechart starting from \n a current state that meet the statechart conditions. When complete, some or all of the statechart's \n current states will be changed, and all states that were part of the transition process will either \n be exited or entered in a specific order.\n \n The state that is given to go to will not necessarily be a current state when the state transition process\n is complete. The final state or states are dependent on factors such an initial substates, concurrent \n states, and history states.\n \n Because the statechart can have one or more current states, it may be necessary to indicate what current state\n to start from. If no current state to start from is provided, then the statechart will default to using\n the first current state that it has; depending of the make up of the statechart (no concurrent state vs.\n with concurrent states), the outcome may be unexpected. 
For a statechart with concurrent states, it is best\n to provide a current state in which to start from.\n \n When using history states, the statechart will first make transitions to the given state and then use that\n state's history state and recursively follow each history state's history state until there are no \n more history states to follow. If the given state does not have a history state, then the statechart\n will continue following state transition procedures.\n \n Method can be called in the following ways:\n \n {{{\n \n // With one argument. \n gotoState()\n \n // With two argument.\n gotoState(,